Open In Colab

Purpose¶

This notebook aims to train a COVID-19 mask classifier that detects the following classes:

  1. person
  2. mask
  3. nomask
In [ ]:
''' Essential '''
import sys
import logging as log
import matplotlib.pyplot as plt
import os
import cv2
import json
import pandas as pd
import numpy as np
import shutil

# Send log records to stdout so they render inline with Colab cell output.
log.basicConfig(format="[ %(levelname)s ] %(message)s", level=log.INFO, stream=sys.stdout)

# Mount Google Drive: the Kaggle API token and cached dataset pickles /
# checkpoints are read from and written to Drive in later cells.
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive

Download datasets (3)¶

In [ ]:
''' Link to explain how to download Datasets from kaggle https://www.kaggle.com/general/74235'''
# Install the Kaggle CLI and place the API token where it expects it
# (~/.kaggle/kaggle.json); chmod 600 silences the CLI's permissions warning.
!pip install -q kaggle
!mkdir ~/.kaggle
!cp '/content/drive/My Drive/Kaggle/kaggle.json' ~/.kaggle/
!chmod 600 ~/.kaggle/kaggle.json
# !kaggle datasets list
In [ ]:
%%time
# Download the three source datasets from Kaggle, unpack each under
# dataset/<name>, and delete the zip immediately to save Colab disk space.
!kaggle datasets download -d wobotintelligence/face-mask-detection-dataset -p dataset
!unzip dataset/face-mask-detection-dataset.zip -d dataset/face-mask-detection-dataset > /dev/null
!rm dataset/face-mask-detection-dataset.zip

!kaggle datasets download -d ivandanilovich/medical-masks-dataset-images-tfrecords -p dataset
!unzip dataset/medical-masks-dataset-images-tfrecords.zip -d dataset/medical-masks-dataset > /dev/null
!rm dataset/medical-masks-dataset-images-tfrecords.zip

!kaggle datasets download -d abdelaliezzyn/medical-masks -p dataset
!unzip dataset/medical-masks.zip -d dataset/medical-masks > /dev/null
!rm dataset/medical-masks.zip
Downloading face-mask-detection-dataset.zip to dataset
100% 2.49G/2.50G [00:36<00:00, 45.6MB/s]
100% 2.50G/2.50G [00:37<00:00, 72.2MB/s]
Downloading medical-masks-dataset-images-tfrecords.zip to dataset
 97% 227M/234M [00:03<00:00, 75.1MB/s]
100% 234M/234M [00:03<00:00, 74.9MB/s]
Downloading medical-masks.zip to dataset
100% 5.65G/5.65G [02:31<00:00, 33.7MB/s]
100% 5.65G/5.65G [02:31<00:00, 40.1MB/s]
CPU times: user 1.43 s, sys: 313 ms, total: 1.75 s
Wall time: 7min 32s

Reading dataset 1¶

In [ ]:
import os
import json
from collections import defaultdict  # NOTE(review): imported but apparently unused in this cell
import pandas as pd

# Dataset 1 layout: one JSON annotation file per annotated image. The two
# counts printed below differ — not every image has an annotation file.
image_dir = "/content/dataset/face-mask-detection-dataset/Medical mask/Medical mask/Medical Mask/images"
image_files = [os.path.join(image_dir, x) for x in os.listdir(image_dir)]
print("Total Files(images):", len(image_files))

annotatations_dir = "/content/dataset/face-mask-detection-dataset/Medical mask/Medical mask/Medical Mask/annotations"
annotatations_files = [os.path.join(annotatations_dir, x) for x in os.listdir(annotatations_dir)]
print("Total Files(annotations):", len(annotatations_files))

# NOTE(review): dict_dataset is never used afterwards — looks like leftover scaffolding.
dict_dataset = {}

def generate_dataset(filepath):
    """Yield one record per annotation in a dataset-1 JSON annotation file.

    Parameters
    ----------
    filepath : str
        Path to a JSON file under an 'annotations' directory, named
        '<image-file>.json'. The matching image is assumed to live under a
        sibling 'images' directory.

    Yields
    ------
    dict
        {'filepath': <image path>, 'classname': str, 'bbox': [x1, y1, x2, y2]}
    """
    # Use a context manager so the file handle is closed promptly;
    # the original json.load(open(...)) leaked the handle.
    with open(filepath) as f:
        json_data = json.load(f)
    # '.../annotations/<img>.json' -> '.../images/<img>'
    img_path = filepath.replace('annotations', 'images').replace('.json', "")
    for annotation in json_data['Annotations']:
        yield {
            'filepath': img_path,
            'classname': annotation['classname'],
            'bbox': annotation['BoundingBox'],
        }

# Flatten every annotation file into a flat record list, then a DataFrame
# with one row per bounding box.
# Fixes: the generator only yields non-empty dicts, so the old
# `if not data: continue` guard was dead code; the enumerate counter was unused.
dataset = []
for annotatations_file in annotatations_files:
    dataset.extend(generate_dataset(annotatations_file))

df_1 = pd.DataFrame(dataset)
df_1
Total Files(images): 6024
Total Files(annotations): 4326
Out[ ]:
filepath classname bbox
0 /content/dataset/face-mask-detection-dataset/M... face_with_mask [1159, 604, 1266, 756]
1 /content/dataset/face-mask-detection-dataset/M... face_with_mask [882, 574, 992, 711]
2 /content/dataset/face-mask-detection-dataset/M... face_with_mask [742, 409, 809, 504]
3 /content/dataset/face-mask-detection-dataset/M... face_with_mask_incorrect [1789, 236, 1906, 384]
4 /content/dataset/face-mask-detection-dataset/M... mask_surgical [1796, 316, 1871, 384]
... ... ... ...
15407 /content/dataset/face-mask-detection-dataset/M... face_other_covering [145, 82, 264, 210]
15408 /content/dataset/face-mask-detection-dataset/M... face_other_covering [522, 133, 629, 274]
15409 /content/dataset/face-mask-detection-dataset/M... eyeglasses [220, 130, 266, 152]
15410 /content/dataset/face-mask-detection-dataset/M... face_with_mask [291, 8, 638, 498]
15411 /content/dataset/face-mask-detection-dataset/M... mask_colorful [323, 248, 628, 496]

15412 rows × 3 columns

Dataset 2¶

In [ ]:
''' Medical Mask v 1'''

import cv2
import matplotlib.pyplot as plt
import xml.etree.ElementTree as ET
# from mtcnn.mtcnn import MTCNN

# print(annotatations_files[0:2])
def generate_dataset(label_file, display=True):
    """Yield one record per <object> in a Pascal-VOC-style XML label file (dataset 2).

    Parameters
    ----------
    label_file : str
        Path to an XML file under a 'labels' directory; the matching image is
        assumed to live under a sibling 'images' directory with a .jpg extension.
    display : bool
        Unused; kept for backward compatibility with existing callers.

    Yields
    ------
    dict
        {'filepath': <image path>, 'classname': str, 'bbox': [x1, y1, x2, y2]}
    """
    img_file = label_file.replace('labels', 'images').replace('xml', 'jpg')
    root = ET.parse(label_file).getroot()
    for type_tag in root.findall('object'):
        try:
            classname, x1, x2, y1, y2 = None, None, None, None, None
            for child in type_tag:
                if child.tag == 'name':
                    classname = child.text
                elif child.tag == "bndbox":
                    for subchild in child:
                        if subchild.tag == "xmin":
                            x1 = int(subchild.text)
                        elif subchild.tag == "xmax":
                            x2 = int(subchild.text)
                        elif subchild.tag == "ymin":
                            y1 = int(subchild.text)
                        elif subchild.tag == "ymax":
                            y2 = int(subchild.text)
            # Normalize inverted boxes (some files have min/max swapped).
            if x1 > x2:
                x1, x2 = x2, x1
            if y1 > y2:
                y1, y2 = y2, y1
            # Pad the box by a fixed border, clamped to the image origin.
            # NOTE(review): y2 (bottom edge) is NOT padded here — looks
            # asymmetric; confirm whether that is intentional.
            b = 20
            x1, x2, y1, y2 = x1 - b, x2 + b, y1 - b, y2
            x1 = max(0, x1)
            y1 = max(0, y1)

            d = {}
            d['filepath'] = img_file
            d['classname'] = classname
            d['bbox'] = [x1, y1, x2, y2]
            yield d
        except Exception as err:
            # Fix: report *why* parsing failed, not just which file
            # (the original captured `err` but never printed it).
            print(label_file, err)

# NOTE(review): dir_labels is created but never used in this cell — leftover?
dir_labels = "dataset/_labeled_tmp"
if not os.path.exists(dir_labels):
    os.makedirs(dir_labels)

dataset_dir = "/content/dataset/medical-masks-dataset/medical-masks-dataset"
labels_files = [os.path.join(dataset_dir, 'labels', x) for x in os.listdir(os.path.join(dataset_dir, 'labels'))]

# fileid = "2756.png"
import traceback  # NOTE(review): imported but not used in this cell
print(len(labels_files))
# One row per bounding box; the falsy check below never triggers because
# the generator yields only non-empty dicts.
dataset = []
for cntr, labels_file in enumerate(labels_files):
    for data in generate_dataset(labels_file):
        if not data:
            continue
        dataset.append(data)

df_2 = pd.DataFrame(dataset)
df_2
1148
Out[ ]:
filepath classname bbox
0 /content/dataset/medical-masks-dataset/medical... mask [322, 130, 413, 225]
1 /content/dataset/medical-masks-dataset/medical... mask [494, 107, 590, 185]
2 /content/dataset/medical-masks-dataset/medical... none [116, 41, 228, 137]
3 /content/dataset/medical-masks-dataset/medical... mask [324, 156, 437, 249]
4 /content/dataset/medical-masks-dataset/medical... mask [502, 163, 614, 256]
... ... ... ...
5086 /content/dataset/medical-masks-dataset/medical... mask [1198, 108, 1371, 238]
5087 /content/dataset/medical-masks-dataset/medical... mask [77, 132, 379, 497]
5088 /content/dataset/medical-masks-dataset/medical... mask [396, 119, 605, 375]
5089 /content/dataset/medical-masks-dataset/medical... mask [560, 116, 698, 273]
5090 /content/dataset/medical-masks-dataset/medical... mask [647, 78, 774, 268]

5091 rows × 3 columns

Dataset 3¶

In [ ]:
''' Medical Mask v 1'''

import cv2
import matplotlib.pyplot as plt
import xml.etree.ElementTree as ET
# from mtcnn.mtcnn import MTCNN

# print(annotatations_files[0:2])
def generate_dataset(label_file, display=True):
    """Yield one record per <object> in a Pascal-VOC-style XML annotation file (dataset 3).

    Same parsing logic as the dataset-2 reader, but here annotations live
    under an 'annotations' directory and images are .png files.

    Parameters
    ----------
    label_file : str
        Path to an XML file under an 'annotations' directory; the matching
        image is assumed to live under a sibling 'images' directory as .png.
    display : bool
        Unused; kept for backward compatibility with existing callers.

    Yields
    ------
    dict
        {'filepath': <image path>, 'classname': str, 'bbox': [x1, y1, x2, y2]}
    """
    img_file = label_file.replace('annotations', 'images').replace('xml', 'png')
    root = ET.parse(label_file).getroot()
    for type_tag in root.findall('object'):
        try:
            classname, x1, x2, y1, y2 = None, None, None, None, None
            for child in type_tag:
                if child.tag == 'name':
                    classname = child.text
                elif child.tag == "bndbox":
                    for subchild in child:
                        if subchild.tag == "xmin":
                            x1 = int(subchild.text)
                        elif subchild.tag == "xmax":
                            x2 = int(subchild.text)
                        elif subchild.tag == "ymin":
                            y1 = int(subchild.text)
                        elif subchild.tag == "ymax":
                            y2 = int(subchild.text)
            # Normalize inverted boxes (some files have min/max swapped).
            if x1 > x2:
                x1, x2 = x2, x1
            if y1 > y2:
                y1, y2 = y2, y1
            # Pad the box by a fixed border, clamped to the image origin.
            # NOTE(review): y2 (bottom edge) is NOT padded here — looks
            # asymmetric; confirm whether that is intentional.
            b = 20
            x1, x2, y1, y2 = x1 - b, x2 + b, y1 - b, y2
            x1 = max(0, x1)
            y1 = max(0, y1)

            d = {}
            d['filepath'] = img_file
            d['classname'] = classname
            d['bbox'] = [x1, y1, x2, y2]
            yield d
        except Exception as err:
            # Fix: report *why* parsing failed, not just which file
            # (the original captured `err` but never printed it).
            print(label_file, err)

# NOTE(review): dir_labels is created but never used in this cell — leftover?
dir_labels = "dataset/_labeled_tmp"
if not os.path.exists(dir_labels):
    os.makedirs(dir_labels)

dataset_dir = "/content/dataset/medical-masks/medical_masks/train"
labels_files = [os.path.join(dataset_dir, 'annotations', x) for x in os.listdir(os.path.join(dataset_dir, 'annotations'))]

# fileid = "2756.png"
import traceback  # NOTE(review): imported but not used in this cell
print(len(labels_files))
# One row per bounding box; the falsy check below never triggers because
# the generator yields only non-empty dicts.
dataset = []
for cntr, labels_file in enumerate(labels_files):
    for data in generate_dataset(labels_file):
        if not data:
            continue
        dataset.append(data)

df_3 = pd.DataFrame(dataset)
df_3
7782
Out[ ]:
filepath classname bbox
0 /content/dataset/medical-masks/medical_masks/t... none [474, 44, 598, 170]
1 /content/dataset/medical-masks/medical_masks/t... none [80, 29, 156, 82]
2 /content/dataset/medical-masks/medical_masks/t... none [54, 166, 196, 356]
3 /content/dataset/medical-masks/medical_masks/t... none [262, 54, 414, 228]
4 /content/dataset/medical-masks/medical_masks/t... none [446, 48, 572, 194]
... ... ... ...
18277 /content/dataset/medical-masks/medical_masks/t... none [34, 6, 145, 123]
18278 /content/dataset/medical-masks/medical_masks/t... none [116, 124, 258, 272]
18279 /content/dataset/medical-masks/medical_masks/t... none [394, 36, 538, 200]
18280 /content/dataset/medical-masks/medical_masks/t... none [744, 74, 910, 254]
18281 /content/dataset/medical-masks/medical_masks/t... none [386, 89, 513, 234]

18282 rows × 3 columns

Merge datasets¶

In [ ]:
# Unify the per-dataset class labels before concatenation:
#   dataset 1: face_with_mask -> mask, face_no_mask -> no_mask
#   dataset 2: none -> no_mask
#   dataset 3: good -> mask, none -> no_mask
# Fixes: replaced chained identity-preserving lambdas with idiomatic
# Series.replace dict mappings, and removed two value_counts() calls whose
# results were computed mid-cell and silently discarded.
df_1['classname'] = df_1['classname'].replace(
    {'face_with_mask': 'mask', 'face_no_mask': 'no_mask'})
df_2['classname'] = df_2['classname'].replace({'none': 'no_mask'})
df_3['classname'] = df_3['classname'].replace(
    {'good': 'mask', 'none': 'no_mask'})

# Stack the three normalized box tables row-wise.
df_mask = pd.concat([df_1, df_2, df_3], axis=0)
In [ ]:
# Person boxes were presumably prepared in a separate notebook and cached on
# Drive (TODO confirm provenance); combine them with the mask boxes.
df_person = pd.read_pickle("/content/drive/My Drive/datasets/facemask/dataset_person_bbox.pkl")
df = pd.concat([df_person, df_mask], axis=0)
In [ ]:
# Count unique image files per table. The printed output shows
# combined == mask (13256), suggesting the person boxes were drawn on the
# same image files as the mask boxes — TODO confirm.
print("mask:", len(df_mask['filepath'].unique()), 
      "person:", len(df_person['filepath'].unique()), 
      "combined:", len(df['filepath'].unique()))
mask: 13256 person: 13077 combined: 13256
In [ ]:
# Class distribution of the combined dataset (many minor classes remain;
# filtering to person/mask/no_mask happens later).
df['classname'].value_counts()
Out[ ]:
person                      31742
mask                        14745
no_mask                     13689
mask_surgical                2430
mask_colorful                1876
face_other_covering          1372
eyeglasses                    914
hat                           823
bad                           541
sunglasses                    358
hair_net                      287
scarf_bandana                 260
goggles                       192
helmet                        187
hijab_niqab                   173
face_shield                   160
hood                          159
face_with_mask_incorrect      150
poor                          147
balaclava_ski_mask            134
turban                         94
gas_mask                       55
other                          39
Name: classname, dtype: int64

Filter images with invalid headers (PIL reports them as broken TIFFs) | detectron2 throws errors for these¶

In [ ]:
# !pip install -U fvcore
_EXIF_ORIENT = 274 
def _apply_exif_orientation(image):
    if not hasattr(image, "getexif"):
        return image
    exif = image.getexif()

    if exif is None:
        return image

    orientation = exif.get(_EXIF_ORIENT)
    method = {
        2: Image.FLIP_LEFT_RIGHT,
        3: Image.ROTATE_180,
        4: Image.FLIP_TOP_BOTTOM,
        5: Image.TRANSPOSE,
        6: Image.ROTATE_270,
        7: Image.TRANSVERSE,
        8: Image.ROTATE_90,
    }.get(orientation)

    if method is not None:
        return image.transpose(method)
    return image

from PIL import Image
from fvcore.common.file_io import PathManager
# Open every image the way detectron2 does (PathManager + PIL + EXIF
# transpose) to find files that would crash training; collect them so
# they can be dropped from the dataset.
files_error = []
for filename in df['filepath'].unique().tolist():
    try:
        with PathManager.open(filename, "rb") as f:
            image = Image.open(f)
            _apply_exif_orientation(image)
            del image
    except Exception as err:
        print(err)
        print(filename)
        files_error.append(filename)
not a TIFF file (header b'' not valid)
/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_0398.png
not a TIFF file (header b'' not valid)
/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_2666.png
not a TIFF file (header b'' not valid)
/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_0310.png
not a TIFF file (header b'' not valid)
/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_4300.png
not a TIFF file (header b'' not valid)
/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_0143.png
not a TIFF file (header b'' not valid)
/content/dataset/medical-masks/medical_masks/train/images/aizootech_val_0758.png
not a TIFF file (header b'' not valid)
/content/dataset/medical-masks/medical_masks/train/images/aizootech_val_1344.png
not a TIFF file (header b'' not valid)
/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_1787.png
In [ ]:
# print(files_error)
# Hardcoded copy of the broken files found by the scan above, so this cell
# can run without repeating the (slow) full-image scan.
files_error = ['/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_0398.png', '/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_2666.png', '/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_0310.png', '/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_4300.png', '/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_0143.png', '/content/dataset/medical-masks/medical_masks/train/images/aizootech_val_0758.png', '/content/dataset/medical-masks/medical_masks/train/images/aizootech_val_1344.png', '/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_1787.png']
print(df.shape)
# Drop all boxes belonging to the unreadable images, then cache the cleaned
# combined table locally (copied to Drive in the next cell).
df = df[~df.filepath.isin(files_error)]
print(df.shape)
df.to_pickle("dataset_person_mask_bbox.pkl")
(70527, 3)
(70511, 3)
In [ ]:
# cp "dataset_person_mask_bbox.pkl" "/content/drive/My Drive/datasets/facemask/dataset_person_mask_bbox.pkl"
In [ ]:
import pandas as pd
# Re-entry point: reload the cleaned combined table from Drive so the
# download/parse cells above can be skipped on later sessions.
df = pd.read_pickle("/content/drive/My Drive/datasets/facemask/dataset_person_mask_bbox.pkl")
In [ ]:
import random

# Keep only the three classes the detector is trained on.
classes = ['person', 'mask', 'no_mask']
df_m = df[df.classname.isin(classes)]

# Shuffle unique image paths and split 80/20 at the *file* level so every
# box of a given image lands in the same split.
# Fix: seed the shuffle — the original split changed on every re-run,
# making the train/val partition irreproducible.
random.seed(42)
filepaths = df_m['filepath'].unique()
random.shuffle(filepaths)

total_files = len(filepaths)
split_at = int(total_files * .80)
df_train = df_m[df_m.filepath.isin(filepaths[0:split_at])]
df_val = df_m[df_m.filepath.isin(filepaths[split_at:])]
print("df_train:", df_train.shape, "df_val:", df_val.shape)
print("================== Train =================== ")
print(df_train['classname'].value_counts())
print("================== Val =================== ")
print(df_val['classname'].value_counts())
df_train: (48385, 3) df_val: (11775, 3)
================== Train =================== 
person     25486
mask       11730
no_mask    11169
Name: classname, dtype: int64
================== Val =================== 
person     6248
mask       3013
no_mask    2514
Name: classname, dtype: int64
In [ ]:
# Unique image files per split (sanity check on the 80/20 file-level split).
len(df_train['filepath'].unique()), len(df_val['filepath'].unique())
Out[ ]:
(10581, 2646)
In [ ]:
import cv2
import matplotlib.pyplot as plt

# Quick visual check on one sample image.
img = cv2.imread("dataset/face-mask-detection-dataset/Medical mask/Medical mask/Medical Mask/images/2756.png")
# Fix: fail with a clear message if the path is wrong (imread returns None
# silently), and convert BGR -> RGB — cv2 loads BGR but matplotlib expects
# RGB, so the original rendered with red/blue channels swapped.
if img is None:
    raise FileNotFoundError("cv2.imread returned None — check the image path")
plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
plt.show()

Converting dataset to model format¶

In [ ]:
import torch, torchvision
print(torch.__version__, torch.cuda.is_available())
!gcc --version
# install detectron2: (Colab has CUDA 10.1 + torch 1.6)
assert torch.__version__.startswith("1.6")  # the wheel below is built for torch 1.6 / cu101
!pip install pyyaml==5.1 pycocotools>=2.0.1
!pip uninstall -y detectron2
!pip install detectron2 -f https://dl.fbaipublicfiles.com/detectron2/wheels/cu101/torch1.6/index.html
1.6.0+cu101 True
gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0
Copyright (C) 2017 Free Software Foundation, Inc.
This is free software; see the source for copying conditions.  There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.

WARNING: Skipping detectron2 as it is not installed.
Looking in links: https://dl.fbaipublicfiles.com/detectron2/wheels/cu101/torch1.6/index.html
Collecting detectron2
  Downloading https://dl.fbaipublicfiles.com/detectron2/wheels/cu101/torch1.6/detectron2-0.2.1%2Bcu101-cp36-cp36m-linux_x86_64.whl (6.6MB)
     |████████████████████████████████| 6.6MB 4.0MB/s 
Requirement already satisfied: future in /usr/local/lib/python3.6/dist-packages (from detectron2) (0.16.0)
Requirement already satisfied: cloudpickle in /usr/local/lib/python3.6/dist-packages (from detectron2) (1.3.0)
Requirement already satisfied: matplotlib in /usr/local/lib/python3.6/dist-packages (from detectron2) (3.2.2)
Requirement already satisfied: pydot in /usr/local/lib/python3.6/dist-packages (from detectron2) (1.3.0)
Requirement already satisfied: tabulate in /usr/local/lib/python3.6/dist-packages (from detectron2) (0.8.7)
Requirement already satisfied: termcolor>=1.1 in /usr/local/lib/python3.6/dist-packages (from detectron2) (1.1.0)
Collecting Pillow>=7.1
  Downloading https://files.pythonhosted.org/packages/30/bf/92385b4262178ca22b34f82e0e09c2922eb351fe39f3cc7b8ba9ea555b41/Pillow-7.2.0-cp36-cp36m-manylinux1_x86_64.whl (2.2MB)
     |████████████████████████████████| 2.2MB 5.6MB/s 
Collecting fvcore>=0.1.1
  Downloading https://files.pythonhosted.org/packages/fa/04/a1c9c25ed552c292ff90ac8927a3888e237c09646ae705913087f6ca5a9d/fvcore-0.1.2.post20200926.tar.gz
Collecting yacs>=0.1.6
  Downloading https://files.pythonhosted.org/packages/38/4f/fe9a4d472aa867878ce3bb7efb16654c5d63672b86dc0e6e953a67018433/yacs-0.1.8-py3-none-any.whl
Collecting mock
  Downloading https://files.pythonhosted.org/packages/cd/74/d72daf8dff5b6566db857cfd088907bb0355f5dd2914c4b3ef065c790735/mock-4.0.2-py3-none-any.whl
Requirement already satisfied: tqdm>4.29.0 in /usr/local/lib/python3.6/dist-packages (from detectron2) (4.41.1)
Requirement already satisfied: pycocotools>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from detectron2) (2.0.2)
Requirement already satisfied: tensorboard in /usr/local/lib/python3.6/dist-packages (from detectron2) (2.3.0)
Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->detectron2) (1.2.0)
Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->detectron2) (2.8.1)
Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->detectron2) (2.4.7)
Requirement already satisfied: numpy>=1.11 in /usr/local/lib/python3.6/dist-packages (from matplotlib->detectron2) (1.18.5)
Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from matplotlib->detectron2) (0.10.0)
Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.6/dist-packages (from fvcore>=0.1.1->detectron2) (5.1)
Collecting portalocker
  Downloading https://files.pythonhosted.org/packages/89/a6/3814b7107e0788040870e8825eebf214d72166adf656ba7d4bf14759a06a/portalocker-2.0.0-py2.py3-none-any.whl
Requirement already satisfied: cython>=0.27.3 in /usr/local/lib/python3.6/dist-packages (from pycocotools>=2.0.1->detectron2) (0.29.21)
Requirement already satisfied: setuptools>=18.0 in /usr/local/lib/python3.6/dist-packages (from pycocotools>=2.0.1->detectron2) (50.3.0)
Requirement already satisfied: protobuf>=3.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (3.12.4)
Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (2.23.0)
Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (1.15.0)
Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (1.7.0)
Requirement already satisfied: google-auth<2,>=1.6.3 in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (1.17.2)
Requirement already satisfied: grpcio>=1.24.3 in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (1.32.0)
Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (1.0.1)
Requirement already satisfied: wheel>=0.26; python_version >= "3" in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (0.35.1)
Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (3.2.2)
Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (0.4.1)
Requirement already satisfied: absl-py>=0.4 in /usr/local/lib/python3.6/dist-packages (from tensorboard->detectron2) (0.10.0)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard->detectron2) (2020.6.20)
Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard->detectron2) (2.10)
Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard->detectron2) (1.24.3)
Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard->detectron2) (3.0.4)
Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard->detectron2) (0.2.8)
Requirement already satisfied: cachetools<5.0,>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard->detectron2) (4.1.1)
Requirement already satisfied: rsa<5,>=3.1.4; python_version >= "3" in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard->detectron2) (4.6)
Requirement already satisfied: importlib-metadata; python_version < "3.8" in /usr/local/lib/python3.6/dist-packages (from markdown>=2.6.8->tensorboard->detectron2) (1.7.0)
Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard->detectron2) (1.3.0)
Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /usr/local/lib/python3.6/dist-packages (from pyasn1-modules>=0.2.1->google-auth<2,>=1.6.3->tensorboard->detectron2) (0.4.8)
Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.6/dist-packages (from importlib-metadata; python_version < "3.8"->markdown>=2.6.8->tensorboard->detectron2) (3.1.0)
Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.6/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard->detectron2) (3.1.0)
Building wheels for collected packages: fvcore
  Building wheel for fvcore (setup.py) ... done
  Created wheel for fvcore: filename=fvcore-0.1.2.post20200926-cp36-none-any.whl size=43980 sha256=5932471e1470f26b74b0ee8750f0dc7ef5611ba142c367af8cce304e0923fcf5
  Stored in directory: /root/.cache/pip/wheels/84/bc/60/e932dde6feea467e8043f34dbd875ccbbff7469ef773aa72ec
Successfully built fvcore
ERROR: albumentations 0.1.12 has requirement imgaug<0.2.7,>=0.2.5, but you'll have imgaug 0.2.9 which is incompatible.
Installing collected packages: Pillow, yacs, portalocker, fvcore, mock, detectron2
  Found existing installation: Pillow 7.0.0
    Uninstalling Pillow-7.0.0:
      Successfully uninstalled Pillow-7.0.0
Successfully installed Pillow-7.2.0 detectron2-0.2.1+cu101 fvcore-0.1.2.post20200926 mock-4.0.2 portalocker-2.0.0 yacs-0.1.8
In [ ]:
# Restore earlier training artifacts (checkpoint, metrics, COCO-format val
# cache) from Drive into output/ so resume_or_load(resume=True) can pick up.
!cp -Rv "/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x" output
'/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x' -> 'output'
'/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x/metrics.json' -> 'output/metrics.json'
'/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x/events.out.tfevents.1601140189.42bfe2eb1383.883.0' -> 'output/events.out.tfevents.1601140189.42bfe2eb1383.883.0'
'/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x/model_final.pth' -> 'output/model_final.pth'
'/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x/last_checkpoint' -> 'output/last_checkpoint'
'/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x/covid_1_val_coco_format.json.lock' -> 'output/covid_1_val_coco_format.json.lock'
'/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x/covid_1_val_coco_format.json' -> 'output/covid_1_val_coco_format.json'
In [ ]:
import detectron2
from detectron2.utils.logger import setup_logger
setup_logger()

# import some common libraries
import numpy as np
import os, json, cv2, random
from google.colab.patches import cv2_imshow

# import some common detectron2 utilities
from detectron2 import model_zoo
from detectron2.engine import DefaultPredictor
from detectron2.config import get_cfg
from detectron2.utils.visualizer import Visualizer
from detectron2.data import MetadataCatalog, DatasetCatalog
""" Download pretrained models """

# Build a COCO-pretrained predictor to sanity-check detectron2 before
# fine-tuning on the mask dataset.
cfg = get_cfg()
model_yaml = "COCO-Detection/faster_rcnn_R_50_C4_3x.yaml"
# model_yaml = "COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml"

cfg.merge_from_file(model_zoo.get_config_file(model_yaml))
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.9  # only keep high-confidence detections for the demo

# Find a model from detectron2's model zoo. You can use the https://dl.fbaipublicfiles... url as well
cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(model_yaml)
predictor = DefaultPredictor(cfg)
WARNING [09/26 19:45:37 d2.modeling.backbone.resnet]: ResNet.make_stage(first_stride=) is deprecated!  Use 'stride_per_block' or 'stride' instead.
[ INFO ] Loading checkpoint from https://dl.fbaipublicfiles.com/detectron2/COCO-Detection/faster_rcnn_R_50_C4_3x/137849393/model_final_f97cb7.pkl
[ INFO ] Reading a file from 'Detectron2 Model Zoo'
In [ ]:
# Run the COCO-pretrained model on one dataset image and render the result.
img = cv2.imread(df_m['filepath'].tolist()[4])
outputs = predictor(img)
# [:, :, ::-1] flips BGR <-> RGB channel order for the Visualizer/display.
v = Visualizer(img[:, :, ::-1], MetadataCatalog.get(cfg.DATASETS.TRAIN[0]), scale=1.2)
v = v.draw_instance_predictions(outputs["instances"].to("cpu"))
cv2_imshow(v.get_image()[:, :, ::-1])
[ INFO ] NumExpr defaulting to 2 threads.
/usr/local/lib/python3.6/dist-packages/detectron2/modeling/roi_heads/fast_rcnn.py:111: UserWarning: This overload of nonzero is deprecated:
	nonzero()
Consider using one of the following signatures instead:
	nonzero(*, bool as_tuple) (Triggered internally at  /pytorch/torch/csrc/utils/python_arg_parser.cpp:766.)
  filter_inds = filter_mask.nonzero()

Converting dataset in detectron2 format¶

In [ ]:
%%time
from detectron2.structures import BoxMode

# Running image_id counter shared across the train and val conversions below.
idx = 0

def get_dataset_dict(df):
    """Convert all box rows of a single image into a detectron2 dataset record.

    Parameters
    ----------
    df : pd.DataFrame
        Rows for exactly one image, with 'filepath', 'classname' and
        'bbox' ([x1, y1, x2, y2]) columns.

    Returns
    -------
    list of one record dict (list-wrapped so callers can .extend()).
    Uses and increments the module-global `idx` as the image_id.
    """
    global idx
    dataset_dicts = []
    record = {}
    filename = df['filepath'].iloc[0]  # was .tolist()[0]; iloc avoids copying the column
    img = cv2.imread(filename)
    # Fix: imread returns None on unreadable files; fail with the offending
    # path instead of an opaque AttributeError on NoneType.shape.
    if img is None:
        raise IOError("cv2.imread failed for: %s" % filename)
    height, width = img.shape[:2]
    record["file_name"] = filename
    record["image_id"] = idx
    record["height"] = height
    record["width"] = width
    idx += 1
    objs = []
    for _, row in df.iterrows():
        x1, y1, x2, y2 = row['bbox']
        px = [x1, x2]
        py = [y1, y2]
        # Box expressed as a 4-vertex polygon (detectron2 segmentation field).
        # NOTE(review): only the first vertex carries the +0.5 offset — looks
        # unintentional; confirm against the source this was adapted from.
        poly = [(x1 + 0.5, y1 + 0.5), (x2, y1), (x2, y2), (x1, y2)]
        poly = [coord for vertex in poly for coord in vertex]

        obj = {
            "bbox": [np.min(px), np.min(py), np.max(px), np.max(py)],
            "bbox_mode": BoxMode.XYXY_ABS,
            "segmentation": [poly],
            "category_id": classes.index(row['classname']),
        }
        objs.append(obj)
    record["annotations"] = objs
    dataset_dicts.append(record)
    return dataset_dicts

# Convert every unique training image into a detectron2 record and register
# the result under "covid_2_train".
dataset_dicts_all_t = []
for cntr, filepath in enumerate(df_train['filepath'].unique()):
    df_ = df_train[df_train['filepath'] == filepath]
    dataset_dicts = get_dataset_dict(df_)
    dataset_dicts_all_t.extend(dataset_dicts)

meta = "covid_2"
DatasetCatalog.register(meta + "_train", lambda: dataset_dicts_all_t)
MetadataCatalog.get(meta + "_train").set(thing_classes=classes)
train_metadata = MetadataCatalog.get(meta + "_train")

# Same conversion for the validation split, registered as "covid_2_val".
dataset_dicts_all_v = []
for cntr, filepath in enumerate(df_val['filepath'].unique()):
    df_ = df_val[df_val['filepath'] == filepath]
    dataset_dicts = get_dataset_dict(df_)
    dataset_dicts_all_v.extend(dataset_dicts)

DatasetCatalog.register(meta + "_val", lambda : dataset_dicts_all_v)
MetadataCatalog.get(meta + "_val").set(thing_classes=classes)

val_metadata = MetadataCatalog.get(meta + "_val")
CPU times: user 3min 59s, sys: 6.02 s, total: 4min 5s
Wall time: 4min 24s
In [ ]:
# Spot-check three random validation records by drawing their ground-truth
# boxes; [:, :, ::-1] flips BGR <-> RGB channel order for display.
for d in random.sample(dataset_dicts_all_v, 3):
    img = cv2.imread(d["file_name"])
    visualizer = Visualizer(img[:, :, ::-1], metadata=val_metadata, scale=0.5)
    out = visualizer.draw_dataset_dict(d)
    cv2_imshow(out.get_image()[:, :, ::-1])
In [ ]:
# We need to disable flip augmentation, else person with cap will be treated as mask.
# Manual patch required: this detectron2 version has no config switch for it,
# so edit the module file shown below and comment out the RandomFlip lines.
from detectron2.data import detection_utils
detection_utils

# Comment Line 580 - 581
# if is_train:
#     augmentation.append(T.RandomFlip())
Out[ ]:
<module 'detectron2.data.detection_utils' from '/usr/local/lib/python3.6/dist-packages/detectron2/data/detection_utils.py'>
In [ ]:
from detectron2.engine import DefaultTrainer

# Instance-segmentation alternative, kept for reference:
# model_yaml = "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml"
model_yaml = "COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml"

cfg = get_cfg()
cfg.merge_from_file(model_zoo.get_config_file(model_yaml))

cfg.DATASETS.TRAIN = (meta + "_train",)
cfg.DATASETS.TEST = ()  # no evaluation during training; enable the line below to evaluate
# cfg.DATASETS.TEST = (meta + "_val",)

cfg.DATALOADER.NUM_WORKERS = 2
cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(model_yaml)  # Let training initialize from model zoo
cfg.SOLVER.IMS_PER_BATCH = 2
cfg.SOLVER.BASE_LR = 0.00025  # pick a good LR
cfg.SOLVER.MAX_ITER = 10000    # total iteration budget; resume_or_load below continues from the last checkpoint
cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 128   # faster, and good enough for this dataset (default: 512)
cfg.MODEL.ROI_HEADS.NUM_CLASSES = len(classes)  # person / mask / nomask (3 classes)

os.makedirs(cfg.OUTPUT_DIR, exist_ok=True)
trainer = DefaultTrainer(cfg) 
trainer.resume_or_load(resume=True)  # resumes from the newest checkpoint in OUTPUT_DIR if present
trainer.train()
[09/26 20:20:02 d2.engine.defaults]: Model:
GeneralizedRCNN(
  (backbone): FPN(
    (fpn_lateral2): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1))
    (fpn_output2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (fpn_lateral3): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1))
    (fpn_output3): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (fpn_lateral4): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1))
    (fpn_output4): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (fpn_lateral5): Conv2d(2048, 256, kernel_size=(1, 1), stride=(1, 1))
    (fpn_output5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (top_block): LastLevelMaxPool()
    (bottom_up): ResNet(
      (stem): BasicStem(
        (conv1): Conv2d(
          3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False
          (norm): FrozenBatchNorm2d(num_features=64, eps=1e-05)
        )
      )
      (res2): Sequential(
        (0): BottleneckBlock(
          (shortcut): Conv2d(
            64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv1): Conv2d(
            64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=64, eps=1e-05)
          )
          (conv2): Conv2d(
            64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=64, eps=1e-05)
          )
          (conv3): Conv2d(
            64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
        )
        (1): BottleneckBlock(
          (conv1): Conv2d(
            256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=64, eps=1e-05)
          )
          (conv2): Conv2d(
            64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=64, eps=1e-05)
          )
          (conv3): Conv2d(
            64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
        )
        (2): BottleneckBlock(
          (conv1): Conv2d(
            256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=64, eps=1e-05)
          )
          (conv2): Conv2d(
            64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=64, eps=1e-05)
          )
          (conv3): Conv2d(
            64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
        )
      )
      (res3): Sequential(
        (0): BottleneckBlock(
          (shortcut): Conv2d(
            256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
          (conv1): Conv2d(
            256, 128, kernel_size=(1, 1), stride=(2, 2), bias=False
            (norm): FrozenBatchNorm2d(num_features=128, eps=1e-05)
          )
          (conv2): Conv2d(
            128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=128, eps=1e-05)
          )
          (conv3): Conv2d(
            128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
        )
        (1): BottleneckBlock(
          (conv1): Conv2d(
            512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=128, eps=1e-05)
          )
          (conv2): Conv2d(
            128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=128, eps=1e-05)
          )
          (conv3): Conv2d(
            128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
        )
        (2): BottleneckBlock(
          (conv1): Conv2d(
            512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=128, eps=1e-05)
          )
          (conv2): Conv2d(
            128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=128, eps=1e-05)
          )
          (conv3): Conv2d(
            128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
        )
        (3): BottleneckBlock(
          (conv1): Conv2d(
            512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=128, eps=1e-05)
          )
          (conv2): Conv2d(
            128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=128, eps=1e-05)
          )
          (conv3): Conv2d(
            128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
        )
      )
      (res4): Sequential(
        (0): BottleneckBlock(
          (shortcut): Conv2d(
            512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False
            (norm): FrozenBatchNorm2d(num_features=1024, eps=1e-05)
          )
          (conv1): Conv2d(
            512, 256, kernel_size=(1, 1), stride=(2, 2), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv2): Conv2d(
            256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv3): Conv2d(
            256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=1024, eps=1e-05)
          )
        )
        (1): BottleneckBlock(
          (conv1): Conv2d(
            1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv2): Conv2d(
            256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv3): Conv2d(
            256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=1024, eps=1e-05)
          )
        )
        (2): BottleneckBlock(
          (conv1): Conv2d(
            1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv2): Conv2d(
            256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv3): Conv2d(
            256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=1024, eps=1e-05)
          )
        )
        (3): BottleneckBlock(
          (conv1): Conv2d(
            1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv2): Conv2d(
            256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv3): Conv2d(
            256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=1024, eps=1e-05)
          )
        )
        (4): BottleneckBlock(
          (conv1): Conv2d(
            1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv2): Conv2d(
            256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv3): Conv2d(
            256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=1024, eps=1e-05)
          )
        )
        (5): BottleneckBlock(
          (conv1): Conv2d(
            1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv2): Conv2d(
            256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=256, eps=1e-05)
          )
          (conv3): Conv2d(
            256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=1024, eps=1e-05)
          )
        )
      )
      (res5): Sequential(
        (0): BottleneckBlock(
          (shortcut): Conv2d(
            1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False
            (norm): FrozenBatchNorm2d(num_features=2048, eps=1e-05)
          )
          (conv1): Conv2d(
            1024, 512, kernel_size=(1, 1), stride=(2, 2), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
          (conv2): Conv2d(
            512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
          (conv3): Conv2d(
            512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=2048, eps=1e-05)
          )
        )
        (1): BottleneckBlock(
          (conv1): Conv2d(
            2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
          (conv2): Conv2d(
            512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
          (conv3): Conv2d(
            512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=2048, eps=1e-05)
          )
        )
        (2): BottleneckBlock(
          (conv1): Conv2d(
            2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
          (conv2): Conv2d(
            512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=512, eps=1e-05)
          )
          (conv3): Conv2d(
            512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False
            (norm): FrozenBatchNorm2d(num_features=2048, eps=1e-05)
          )
        )
      )
    )
  )
  (proposal_generator): RPN(
    (rpn_head): StandardRPNHead(
      (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
      (objectness_logits): Conv2d(256, 3, kernel_size=(1, 1), stride=(1, 1))
      (anchor_deltas): Conv2d(256, 12, kernel_size=(1, 1), stride=(1, 1))
    )
    (anchor_generator): DefaultAnchorGenerator(
      (cell_anchors): BufferList()
    )
  )
  (roi_heads): StandardROIHeads(
    (box_pooler): ROIPooler(
      (level_poolers): ModuleList(
        (0): ROIAlign(output_size=(7, 7), spatial_scale=0.25, sampling_ratio=0, aligned=True)
        (1): ROIAlign(output_size=(7, 7), spatial_scale=0.125, sampling_ratio=0, aligned=True)
        (2): ROIAlign(output_size=(7, 7), spatial_scale=0.0625, sampling_ratio=0, aligned=True)
        (3): ROIAlign(output_size=(7, 7), spatial_scale=0.03125, sampling_ratio=0, aligned=True)
      )
    )
    (box_head): FastRCNNConvFCHead(
      (fc1): Linear(in_features=12544, out_features=1024, bias=True)
      (fc2): Linear(in_features=1024, out_features=1024, bias=True)
    )
    (box_predictor): FastRCNNOutputLayers(
      (cls_score): Linear(in_features=1024, out_features=4, bias=True)
      (bbox_pred): Linear(in_features=1024, out_features=12, bias=True)
    )
  )
)
[09/26 20:20:02 d2.data.build]: Removed 0 images with no usable annotations. 10581 images left.
[09/26 20:20:02 d2.data.common]: Serializing 10581 elements to byte tensors and concatenating them all ...
[09/26 20:20:03 d2.data.common]: Serialized dataset takes 11.87 MiB
[09/26 20:20:03 d2.data.dataset_mapper]: Augmentations used in training: [ResizeShortestEdge(short_edge_length=(640, 672, 704, 736, 768, 800), max_size=1333, sample_style='choice'), RandomFlip()]
[09/26 20:20:03 d2.data.build]: Using training sampler TrainingSampler
[ INFO ] Loading checkpoint from ./output/model_0004999.pth
[ INFO ] Loading optimizer from ./output/model_0004999.pth
[ INFO ] Loading scheduler from ./output/model_0004999.pth
[09/26 20:20:03 d2.engine.train_loop]: Starting training from iteration 5000
[09/26 20:20:12 d2.utils.events]:  eta: 0:37:54  iter: 5019  total_loss: 0.478  loss_cls: 0.154  loss_box_reg: 0.327  loss_rpn_cls: 0.003  loss_rpn_loc: 0.024  time: 0.4452  data_time: 0.0242  lr: 0.000250  max_mem: 7096M
[09/26 20:20:21 d2.utils.events]:  eta: 0:36:03  iter: 5039  total_loss: 0.424  loss_cls: 0.126  loss_box_reg: 0.279  loss_rpn_cls: 0.002  loss_rpn_loc: 0.021  time: 0.4343  data_time: 0.0158  lr: 0.000250  max_mem: 7096M
[09/26 20:20:29 d2.utils.events]:  eta: 0:35:45  iter: 5059  total_loss: 0.531  loss_cls: 0.155  loss_box_reg: 0.332  loss_rpn_cls: 0.002  loss_rpn_loc: 0.026  time: 0.4325  data_time: 0.0067  lr: 0.000250  max_mem: 7096M
[09/26 20:20:38 d2.utils.events]:  eta: 0:35:46  iter: 5079  total_loss: 0.626  loss_cls: 0.213  loss_box_reg: 0.377  loss_rpn_cls: 0.005  loss_rpn_loc: 0.025  time: 0.4380  data_time: 0.0064  lr: 0.000250  max_mem: 7096M
[09/26 20:20:47 d2.utils.events]:  eta: 0:35:57  iter: 5099  total_loss: 0.520  loss_cls: 0.124  loss_box_reg: 0.340  loss_rpn_cls: 0.007  loss_rpn_loc: 0.029  time: 0.4391  data_time: 0.0062  lr: 0.000250  max_mem: 7096M
[09/26 20:20:58 d2.utils.events]:  eta: 0:36:11  iter: 5119  total_loss: 0.506  loss_cls: 0.165  loss_box_reg: 0.321  loss_rpn_cls: 0.005  loss_rpn_loc: 0.032  time: 0.4584  data_time: 0.1051  lr: 0.000250  max_mem: 7096M
[09/26 20:21:07 d2.utils.events]:  eta: 0:36:08  iter: 5139  total_loss: 0.551  loss_cls: 0.151  loss_box_reg: 0.328  loss_rpn_cls: 0.005  loss_rpn_loc: 0.021  time: 0.4575  data_time: 0.0077  lr: 0.000250  max_mem: 7096M
[09/26 20:21:17 d2.utils.events]:  eta: 0:36:02  iter: 5159  total_loss: 0.671  loss_cls: 0.220  loss_box_reg: 0.394  loss_rpn_cls: 0.003  loss_rpn_loc: 0.026  time: 0.4576  data_time: 0.0065  lr: 0.000250  max_mem: 7096M
[09/26 20:21:25 d2.utils.events]:  eta: 0:35:53  iter: 5179  total_loss: 0.469  loss_cls: 0.138  loss_box_reg: 0.298  loss_rpn_cls: 0.003  loss_rpn_loc: 0.016  time: 0.4564  data_time: 0.0054  lr: 0.000250  max_mem: 7096M
[09/26 20:21:35 d2.utils.events]:  eta: 0:35:50  iter: 5199  total_loss: 0.548  loss_cls: 0.176  loss_box_reg: 0.344  loss_rpn_cls: 0.006  loss_rpn_loc: 0.025  time: 0.4561  data_time: 0.0062  lr: 0.000250  max_mem: 7096M
[09/26 20:21:43 d2.utils.events]:  eta: 0:35:35  iter: 5219  total_loss: 0.586  loss_cls: 0.170  loss_box_reg: 0.362  loss_rpn_cls: 0.003  loss_rpn_loc: 0.024  time: 0.4550  data_time: 0.0072  lr: 0.000250  max_mem: 7096M
[09/26 20:21:53 d2.utils.events]:  eta: 0:35:30  iter: 5239  total_loss: 0.549  loss_cls: 0.169  loss_box_reg: 0.359  loss_rpn_cls: 0.005  loss_rpn_loc: 0.023  time: 0.4554  data_time: 0.0046  lr: 0.000250  max_mem: 7096M
[09/26 20:22:02 d2.utils.events]:  eta: 0:35:23  iter: 5259  total_loss: 0.574  loss_cls: 0.158  loss_box_reg: 0.371  loss_rpn_cls: 0.002  loss_rpn_loc: 0.021  time: 0.4549  data_time: 0.0068  lr: 0.000250  max_mem: 7096M
[09/26 20:22:11 d2.utils.events]:  eta: 0:35:14  iter: 5279  total_loss: 0.516  loss_cls: 0.150  loss_box_reg: 0.346  loss_rpn_cls: 0.003  loss_rpn_loc: 0.028  time: 0.4545  data_time: 0.0096  lr: 0.000250  max_mem: 7096M
[09/26 20:22:20 d2.utils.events]:  eta: 0:35:15  iter: 5299  total_loss: 0.506  loss_cls: 0.162  loss_box_reg: 0.327  loss_rpn_cls: 0.005  loss_rpn_loc: 0.025  time: 0.4558  data_time: 0.0056  lr: 0.000250  max_mem: 7096M
[09/26 20:22:29 d2.utils.events]:  eta: 0:35:05  iter: 5319  total_loss: 0.525  loss_cls: 0.155  loss_box_reg: 0.338  loss_rpn_cls: 0.004  loss_rpn_loc: 0.019  time: 0.4552  data_time: 0.0064  lr: 0.000250  max_mem: 7096M
[09/26 20:22:38 d2.utils.events]:  eta: 0:34:50  iter: 5339  total_loss: 0.517  loss_cls: 0.141  loss_box_reg: 0.327  loss_rpn_cls: 0.003  loss_rpn_loc: 0.026  time: 0.4534  data_time: 0.0069  lr: 0.000250  max_mem: 7096M
[09/26 20:22:47 d2.utils.events]:  eta: 0:34:43  iter: 5359  total_loss: 0.467  loss_cls: 0.134  loss_box_reg: 0.285  loss_rpn_cls: 0.004  loss_rpn_loc: 0.020  time: 0.4538  data_time: 0.0083  lr: 0.000250  max_mem: 7096M
[09/26 20:22:56 d2.utils.events]:  eta: 0:34:34  iter: 5379  total_loss: 0.466  loss_cls: 0.125  loss_box_reg: 0.308  loss_rpn_cls: 0.005  loss_rpn_loc: 0.020  time: 0.4534  data_time: 0.0074  lr: 0.000250  max_mem: 7096M
[09/26 20:23:04 d2.utils.events]:  eta: 0:34:23  iter: 5399  total_loss: 0.438  loss_cls: 0.151  loss_box_reg: 0.293  loss_rpn_cls: 0.004  loss_rpn_loc: 0.018  time: 0.4524  data_time: 0.0068  lr: 0.000250  max_mem: 7096M
[09/26 20:23:13 d2.utils.events]:  eta: 0:34:14  iter: 5419  total_loss: 0.509  loss_cls: 0.166  loss_box_reg: 0.314  loss_rpn_cls: 0.004  loss_rpn_loc: 0.015  time: 0.4523  data_time: 0.0075  lr: 0.000250  max_mem: 7096M
[09/26 20:23:23 d2.utils.events]:  eta: 0:34:09  iter: 5439  total_loss: 0.430  loss_cls: 0.112  loss_box_reg: 0.287  loss_rpn_cls: 0.001  loss_rpn_loc: 0.026  time: 0.4529  data_time: 0.0078  lr: 0.000250  max_mem: 7096M
[09/26 20:23:32 d2.utils.events]:  eta: 0:34:00  iter: 5459  total_loss: 0.438  loss_cls: 0.116  loss_box_reg: 0.287  loss_rpn_cls: 0.002  loss_rpn_loc: 0.018  time: 0.4526  data_time: 0.0076  lr: 0.000250  max_mem: 7096M
[09/26 20:23:41 d2.utils.events]:  eta: 0:33:51  iter: 5479  total_loss: 0.429  loss_cls: 0.107  loss_box_reg: 0.305  loss_rpn_cls: 0.002  loss_rpn_loc: 0.032  time: 0.4529  data_time: 0.0124  lr: 0.000250  max_mem: 7096M
[09/26 20:23:50 d2.utils.events]:  eta: 0:33:42  iter: 5499  total_loss: 0.499  loss_cls: 0.144  loss_box_reg: 0.338  loss_rpn_cls: 0.002  loss_rpn_loc: 0.022  time: 0.4522  data_time: 0.0057  lr: 0.000250  max_mem: 7096M
[09/26 20:23:58 d2.utils.events]:  eta: 0:33:29  iter: 5519  total_loss: 0.544  loss_cls: 0.142  loss_box_reg: 0.347  loss_rpn_cls: 0.004  loss_rpn_loc: 0.025  time: 0.4515  data_time: 0.0073  lr: 0.000250  max_mem: 7096M
[09/26 20:24:07 d2.utils.events]:  eta: 0:33:18  iter: 5539  total_loss: 0.507  loss_cls: 0.137  loss_box_reg: 0.332  loss_rpn_cls: 0.003  loss_rpn_loc: 0.020  time: 0.4515  data_time: 0.0062  lr: 0.000250  max_mem: 7096M
[09/26 20:24:16 d2.utils.events]:  eta: 0:33:07  iter: 5559  total_loss: 0.538  loss_cls: 0.166  loss_box_reg: 0.312  loss_rpn_cls: 0.004  loss_rpn_loc: 0.028  time: 0.4512  data_time: 0.0074  lr: 0.000250  max_mem: 7096M
[09/26 20:24:25 d2.utils.events]:  eta: 0:33:02  iter: 5579  total_loss: 0.457  loss_cls: 0.128  loss_box_reg: 0.314  loss_rpn_cls: 0.002  loss_rpn_loc: 0.018  time: 0.4515  data_time: 0.0058  lr: 0.000250  max_mem: 7096M
[09/26 20:24:35 d2.utils.events]:  eta: 0:32:57  iter: 5599  total_loss: 0.558  loss_cls: 0.171  loss_box_reg: 0.359  loss_rpn_cls: 0.004  loss_rpn_loc: 0.031  time: 0.4522  data_time: 0.0069  lr: 0.000250  max_mem: 7096M
[09/26 20:24:44 d2.utils.events]:  eta: 0:32:51  iter: 5619  total_loss: 0.469  loss_cls: 0.151  loss_box_reg: 0.317  loss_rpn_cls: 0.004  loss_rpn_loc: 0.030  time: 0.4530  data_time: 0.0184  lr: 0.000250  max_mem: 7096M
[09/26 20:24:54 d2.utils.events]:  eta: 0:32:43  iter: 5639  total_loss: 0.459  loss_cls: 0.132  loss_box_reg: 0.312  loss_rpn_cls: 0.004  loss_rpn_loc: 0.021  time: 0.4530  data_time: 0.0075  lr: 0.000250  max_mem: 7096M
[09/26 20:25:02 d2.utils.events]:  eta: 0:32:31  iter: 5659  total_loss: 0.400  loss_cls: 0.091  loss_box_reg: 0.267  loss_rpn_cls: 0.003  loss_rpn_loc: 0.024  time: 0.4522  data_time: 0.0056  lr: 0.000250  max_mem: 7096M
[09/26 20:25:11 d2.utils.events]:  eta: 0:32:23  iter: 5679  total_loss: 0.603  loss_cls: 0.169  loss_box_reg: 0.388  loss_rpn_cls: 0.008  loss_rpn_loc: 0.034  time: 0.4526  data_time: 0.0059  lr: 0.000250  max_mem: 7096M
[09/26 20:25:20 d2.utils.events]:  eta: 0:32:14  iter: 5699  total_loss: 0.443  loss_cls: 0.118  loss_box_reg: 0.315  loss_rpn_cls: 0.005  loss_rpn_loc: 0.021  time: 0.4526  data_time: 0.0237  lr: 0.000250  max_mem: 7096M
[09/26 20:25:29 d2.utils.events]:  eta: 0:32:05  iter: 5719  total_loss: 0.495  loss_cls: 0.146  loss_box_reg: 0.312  loss_rpn_cls: 0.005  loss_rpn_loc: 0.020  time: 0.4523  data_time: 0.0061  lr: 0.000250  max_mem: 7096M
[09/26 20:25:38 d2.utils.events]:  eta: 0:31:55  iter: 5739  total_loss: 0.487  loss_cls: 0.141  loss_box_reg: 0.318  loss_rpn_cls: 0.002  loss_rpn_loc: 0.021  time: 0.4520  data_time: 0.0255  lr: 0.000250  max_mem: 7096M
[09/26 20:25:48 d2.utils.events]:  eta: 0:31:48  iter: 5759  total_loss: 0.521  loss_cls: 0.165  loss_box_reg: 0.312  loss_rpn_cls: 0.003  loss_rpn_loc: 0.017  time: 0.4525  data_time: 0.0271  lr: 0.000250  max_mem: 7096M
[09/26 20:25:56 d2.utils.events]:  eta: 0:31:38  iter: 5779  total_loss: 0.534  loss_cls: 0.139  loss_box_reg: 0.346  loss_rpn_cls: 0.003  loss_rpn_loc: 0.016  time: 0.4520  data_time: 0.0064  lr: 0.000250  max_mem: 7096M
[09/26 20:26:06 d2.utils.events]:  eta: 0:31:32  iter: 5799  total_loss: 0.586  loss_cls: 0.162  loss_box_reg: 0.348  loss_rpn_cls: 0.002  loss_rpn_loc: 0.032  time: 0.4526  data_time: 0.0228  lr: 0.000250  max_mem: 7096M
[09/26 20:26:14 d2.utils.events]:  eta: 0:31:20  iter: 5819  total_loss: 0.458  loss_cls: 0.148  loss_box_reg: 0.302  loss_rpn_cls: 0.003  loss_rpn_loc: 0.023  time: 0.4518  data_time: 0.0072  lr: 0.000250  max_mem: 7096M
[09/26 20:26:23 d2.utils.events]:  eta: 0:31:10  iter: 5839  total_loss: 0.484  loss_cls: 0.141  loss_box_reg: 0.323  loss_rpn_cls: 0.003  loss_rpn_loc: 0.022  time: 0.4516  data_time: 0.0134  lr: 0.000250  max_mem: 7096M
[09/26 20:26:33 d2.utils.events]:  eta: 0:31:03  iter: 5859  total_loss: 0.469  loss_cls: 0.124  loss_box_reg: 0.287  loss_rpn_cls: 0.003  loss_rpn_loc: 0.021  time: 0.4524  data_time: 0.0370  lr: 0.000250  max_mem: 7096M
[09/26 20:26:42 d2.utils.events]:  eta: 0:30:54  iter: 5879  total_loss: 0.471  loss_cls: 0.128  loss_box_reg: 0.327  loss_rpn_cls: 0.001  loss_rpn_loc: 0.024  time: 0.4523  data_time: 0.0058  lr: 0.000250  max_mem: 7096M
[09/26 20:26:51 d2.utils.events]:  eta: 0:30:44  iter: 5899  total_loss: 0.468  loss_cls: 0.107  loss_box_reg: 0.318  loss_rpn_cls: 0.002  loss_rpn_loc: 0.022  time: 0.4520  data_time: 0.0061  lr: 0.000250  max_mem: 7096M
[09/26 20:26:59 d2.utils.events]:  eta: 0:30:35  iter: 5919  total_loss: 0.523  loss_cls: 0.160  loss_box_reg: 0.343  loss_rpn_cls: 0.003  loss_rpn_loc: 0.027  time: 0.4518  data_time: 0.0063  lr: 0.000250  max_mem: 7096M
[09/26 20:27:09 d2.utils.events]:  eta: 0:30:25  iter: 5939  total_loss: 0.543  loss_cls: 0.142  loss_box_reg: 0.370  loss_rpn_cls: 0.002  loss_rpn_loc: 0.025  time: 0.4518  data_time: 0.0095  lr: 0.000250  max_mem: 7096M
[09/26 20:27:18 d2.utils.events]:  eta: 0:30:17  iter: 5959  total_loss: 0.418  loss_cls: 0.117  loss_box_reg: 0.284  loss_rpn_cls: 0.003  loss_rpn_loc: 0.016  time: 0.4522  data_time: 0.0300  lr: 0.000250  max_mem: 7096M
[09/26 20:27:27 d2.utils.events]:  eta: 0:30:09  iter: 5979  total_loss: 0.541  loss_cls: 0.129  loss_box_reg: 0.357  loss_rpn_cls: 0.004  loss_rpn_loc: 0.029  time: 0.4526  data_time: 0.0441  lr: 0.000250  max_mem: 7096M
[09/26 20:27:37 d2.utils.events]:  eta: 0:30:03  iter: 5999  total_loss: 0.481  loss_cls: 0.110  loss_box_reg: 0.323  loss_rpn_cls: 0.004  loss_rpn_loc: 0.027  time: 0.4529  data_time: 0.0068  lr: 0.000250  max_mem: 7096M
[09/26 20:27:46 d2.utils.events]:  eta: 0:29:54  iter: 6019  total_loss: 0.509  loss_cls: 0.134  loss_box_reg: 0.332  loss_rpn_cls: 0.002  loss_rpn_loc: 0.018  time: 0.4529  data_time: 0.0066  lr: 0.000250  max_mem: 7096M
[09/26 20:27:55 d2.utils.events]:  eta: 0:29:46  iter: 6039  total_loss: 0.473  loss_cls: 0.121  loss_box_reg: 0.310  loss_rpn_cls: 0.002  loss_rpn_loc: 0.021  time: 0.4527  data_time: 0.0060  lr: 0.000250  max_mem: 7096M
[09/26 20:28:04 d2.utils.events]:  eta: 0:29:40  iter: 6059  total_loss: 0.518  loss_cls: 0.154  loss_box_reg: 0.347  loss_rpn_cls: 0.004  loss_rpn_loc: 0.020  time: 0.4526  data_time: 0.0060  lr: 0.000250  max_mem: 7096M
[09/26 20:28:13 d2.utils.events]:  eta: 0:29:31  iter: 6079  total_loss: 0.580  loss_cls: 0.182  loss_box_reg: 0.367  loss_rpn_cls: 0.006  loss_rpn_loc: 0.023  time: 0.4526  data_time: 0.0056  lr: 0.000250  max_mem: 7096M
[09/26 20:28:22 d2.utils.events]:  eta: 0:29:22  iter: 6099  total_loss: 0.495  loss_cls: 0.130  loss_box_reg: 0.313  loss_rpn_cls: 0.003  loss_rpn_loc: 0.032  time: 0.4527  data_time: 0.0053  lr: 0.000250  max_mem: 7096M
[09/26 20:28:31 d2.utils.events]:  eta: 0:29:14  iter: 6119  total_loss: 0.652  loss_cls: 0.176  loss_box_reg: 0.409  loss_rpn_cls: 0.003  loss_rpn_loc: 0.034  time: 0.4530  data_time: 0.0058  lr: 0.000250  max_mem: 7096M
[09/26 20:28:40 d2.utils.events]:  eta: 0:29:03  iter: 6139  total_loss: 0.508  loss_cls: 0.140  loss_box_reg: 0.315  loss_rpn_cls: 0.002  loss_rpn_loc: 0.021  time: 0.4527  data_time: 0.0149  lr: 0.000250  max_mem: 7096M
[09/26 20:28:49 d2.utils.events]:  eta: 0:28:54  iter: 6159  total_loss: 0.464  loss_cls: 0.143  loss_box_reg: 0.306  loss_rpn_cls: 0.003  loss_rpn_loc: 0.025  time: 0.4529  data_time: 0.0192  lr: 0.000250  max_mem: 7096M
[09/26 20:28:58 d2.utils.events]:  eta: 0:28:47  iter: 6179  total_loss: 0.497  loss_cls: 0.129  loss_box_reg: 0.328  loss_rpn_cls: 0.003  loss_rpn_loc: 0.027  time: 0.4529  data_time: 0.0071  lr: 0.000250  max_mem: 7096M
[09/26 20:29:07 d2.utils.events]:  eta: 0:28:38  iter: 6199  total_loss: 0.530  loss_cls: 0.143  loss_box_reg: 0.334  loss_rpn_cls: 0.008  loss_rpn_loc: 0.022  time: 0.4527  data_time: 0.0058  lr: 0.000250  max_mem: 7096M
[09/26 20:29:16 d2.utils.events]:  eta: 0:28:28  iter: 6219  total_loss: 0.420  loss_cls: 0.131  loss_box_reg: 0.269  loss_rpn_cls: 0.005  loss_rpn_loc: 0.017  time: 0.4523  data_time: 0.0052  lr: 0.000250  max_mem: 7096M
[09/26 20:29:25 d2.utils.events]:  eta: 0:28:20  iter: 6239  total_loss: 0.487  loss_cls: 0.126  loss_box_reg: 0.340  loss_rpn_cls: 0.003  loss_rpn_loc: 0.028  time: 0.4525  data_time: 0.0066  lr: 0.000250  max_mem: 7096M
[09/26 20:29:34 d2.utils.events]:  eta: 0:28:11  iter: 6259  total_loss: 0.489  loss_cls: 0.142  loss_box_reg: 0.316  loss_rpn_cls: 0.006  loss_rpn_loc: 0.033  time: 0.4525  data_time: 0.0063  lr: 0.000250  max_mem: 7096M
[09/26 20:29:43 d2.utils.events]:  eta: 0:28:02  iter: 6279  total_loss: 0.473  loss_cls: 0.135  loss_box_reg: 0.325  loss_rpn_cls: 0.001  loss_rpn_loc: 0.014  time: 0.4524  data_time: 0.0065  lr: 0.000250  max_mem: 7096M
[09/26 20:29:52 d2.utils.events]:  eta: 0:27:52  iter: 6299  total_loss: 0.492  loss_cls: 0.138  loss_box_reg: 0.308  loss_rpn_cls: 0.003  loss_rpn_loc: 0.024  time: 0.4526  data_time: 0.0058  lr: 0.000250  max_mem: 7096M
[09/26 20:30:02 d2.utils.events]:  eta: 0:27:44  iter: 6319  total_loss: 0.521  loss_cls: 0.154  loss_box_reg: 0.311  loss_rpn_cls: 0.004  loss_rpn_loc: 0.027  time: 0.4527  data_time: 0.0095  lr: 0.000250  max_mem: 7096M
[09/26 20:30:11 d2.utils.events]:  eta: 0:27:38  iter: 6339  total_loss: 0.481  loss_cls: 0.131  loss_box_reg: 0.326  loss_rpn_cls: 0.002  loss_rpn_loc: 0.031  time: 0.4528  data_time: 0.0066  lr: 0.000250  max_mem: 7096M
[09/26 20:30:20 d2.utils.events]:  eta: 0:27:29  iter: 6359  total_loss: 0.463  loss_cls: 0.117  loss_box_reg: 0.338  loss_rpn_cls: 0.004  loss_rpn_loc: 0.029  time: 0.4530  data_time: 0.0073  lr: 0.000250  max_mem: 7096M
[09/26 20:30:29 d2.utils.events]:  eta: 0:27:19  iter: 6379  total_loss: 0.411  loss_cls: 0.107  loss_box_reg: 0.275  loss_rpn_cls: 0.001  loss_rpn_loc: 0.015  time: 0.4526  data_time: 0.0061  lr: 0.000250  max_mem: 7096M
[09/26 20:30:37 d2.utils.events]:  eta: 0:27:08  iter: 6399  total_loss: 0.405  loss_cls: 0.094  loss_box_reg: 0.278  loss_rpn_cls: 0.002  loss_rpn_loc: 0.013  time: 0.4517  data_time: 0.0056  lr: 0.000250  max_mem: 7096M
[09/26 20:30:46 d2.utils.events]:  eta: 0:27:01  iter: 6419  total_loss: 0.487  loss_cls: 0.109  loss_box_reg: 0.367  loss_rpn_cls: 0.003  loss_rpn_loc: 0.026  time: 0.4522  data_time: 0.0469  lr: 0.000250  max_mem: 7096M
[09/26 20:30:55 d2.utils.events]:  eta: 0:26:50  iter: 6439  total_loss: 0.508  loss_cls: 0.132  loss_box_reg: 0.309  loss_rpn_cls: 0.002  loss_rpn_loc: 0.019  time: 0.4521  data_time: 0.0091  lr: 0.000250  max_mem: 7096M
[09/26 20:31:05 d2.utils.events]:  eta: 0:26:41  iter: 6459  total_loss: 0.545  loss_cls: 0.180  loss_box_reg: 0.332  loss_rpn_cls: 0.009  loss_rpn_loc: 0.023  time: 0.4524  data_time: 0.0054  lr: 0.000250  max_mem: 7096M
[09/26 20:31:14 d2.utils.events]:  eta: 0:26:32  iter: 6479  total_loss: 0.452  loss_cls: 0.104  loss_box_reg: 0.308  loss_rpn_cls: 0.001  loss_rpn_loc: 0.025  time: 0.4523  data_time: 0.0076  lr: 0.000250  max_mem: 7096M
[09/26 20:31:22 d2.utils.events]:  eta: 0:26:23  iter: 6499  total_loss: 0.461  loss_cls: 0.141  loss_box_reg: 0.305  loss_rpn_cls: 0.001  loss_rpn_loc: 0.019  time: 0.4522  data_time: 0.0060  lr: 0.000250  max_mem: 7096M
[09/26 20:31:32 d2.utils.events]:  eta: 0:26:15  iter: 6519  total_loss: 0.452  loss_cls: 0.119  loss_box_reg: 0.305  loss_rpn_cls: 0.004  loss_rpn_loc: 0.039  time: 0.4523  data_time: 0.0180  lr: 0.000250  max_mem: 7096M
[09/26 20:31:41 d2.utils.events]:  eta: 0:26:07  iter: 6539  total_loss: 0.513  loss_cls: 0.145  loss_box_reg: 0.329  loss_rpn_cls: 0.003  loss_rpn_loc: 0.022  time: 0.4522  data_time: 0.0087  lr: 0.000250  max_mem: 7096M
[09/26 20:31:50 d2.utils.events]:  eta: 0:26:01  iter: 6559  total_loss: 0.476  loss_cls: 0.125  loss_box_reg: 0.301  loss_rpn_cls: 0.003  loss_rpn_loc: 0.025  time: 0.4524  data_time: 0.0051  lr: 0.000250  max_mem: 7096M
[09/26 20:31:59 d2.utils.events]:  eta: 0:25:48  iter: 6579  total_loss: 0.527  loss_cls: 0.128  loss_box_reg: 0.357  loss_rpn_cls: 0.006  loss_rpn_loc: 0.034  time: 0.4522  data_time: 0.0073  lr: 0.000250  max_mem: 7096M
[09/26 20:32:08 d2.utils.events]:  eta: 0:25:36  iter: 6599  total_loss: 0.507  loss_cls: 0.135  loss_box_reg: 0.291  loss_rpn_cls: 0.003  loss_rpn_loc: 0.016  time: 0.4522  data_time: 0.0276  lr: 0.000250  max_mem: 7096M
[09/26 20:32:17 d2.utils.events]:  eta: 0:25:27  iter: 6619  total_loss: 0.456  loss_cls: 0.125  loss_box_reg: 0.316  loss_rpn_cls: 0.005  loss_rpn_loc: 0.033  time: 0.4524  data_time: 0.0077  lr: 0.000250  max_mem: 7096M
[09/26 20:32:26 d2.utils.events]:  eta: 0:25:17  iter: 6639  total_loss: 0.471  loss_cls: 0.139  loss_box_reg: 0.321  loss_rpn_cls: 0.004  loss_rpn_loc: 0.022  time: 0.4520  data_time: 0.0067  lr: 0.000250  max_mem: 7096M
[09/26 20:32:34 d2.utils.events]:  eta: 0:25:09  iter: 6659  total_loss: 0.477  loss_cls: 0.143  loss_box_reg: 0.307  loss_rpn_cls: 0.003  loss_rpn_loc: 0.023  time: 0.4519  data_time: 0.0065  lr: 0.000250  max_mem: 7096M
[09/26 20:32:43 d2.utils.events]:  eta: 0:24:57  iter: 6679  total_loss: 0.474  loss_cls: 0.130  loss_box_reg: 0.295  loss_rpn_cls: 0.003  loss_rpn_loc: 0.020  time: 0.4518  data_time: 0.0075  lr: 0.000250  max_mem: 7096M
[09/26 20:32:53 d2.utils.events]:  eta: 0:24:49  iter: 6699  total_loss: 0.533  loss_cls: 0.157  loss_box_reg: 0.323  loss_rpn_cls: 0.003  loss_rpn_loc: 0.034  time: 0.4520  data_time: 0.0070  lr: 0.000250  max_mem: 7096M
[09/26 20:33:03 d2.utils.events]:  eta: 0:24:42  iter: 6719  total_loss: 0.550  loss_cls: 0.155  loss_box_reg: 0.318  loss_rpn_cls: 0.004  loss_rpn_loc: 0.024  time: 0.4525  data_time: 0.0344  lr: 0.000250  max_mem: 7096M
[09/26 20:33:12 d2.utils.events]:  eta: 0:24:33  iter: 6739  total_loss: 0.466  loss_cls: 0.127  loss_box_reg: 0.304  loss_rpn_cls: 0.005  loss_rpn_loc: 0.015  time: 0.4525  data_time: 0.0607  lr: 0.000250  max_mem: 7096M
[09/26 20:33:21 d2.utils.events]:  eta: 0:24:24  iter: 6759  total_loss: 0.520  loss_cls: 0.160  loss_box_reg: 0.346  loss_rpn_cls: 0.002  loss_rpn_loc: 0.017  time: 0.4526  data_time: 0.0288  lr: 0.000250  max_mem: 7096M
[09/26 20:33:30 d2.utils.events]:  eta: 0:24:15  iter: 6779  total_loss: 0.516  loss_cls: 0.106  loss_box_reg: 0.348  loss_rpn_cls: 0.002  loss_rpn_loc: 0.022  time: 0.4526  data_time: 0.0062  lr: 0.000250  max_mem: 7096M
[09/26 20:33:39 d2.utils.events]:  eta: 0:24:05  iter: 6799  total_loss: 0.558  loss_cls: 0.169  loss_box_reg: 0.350  loss_rpn_cls: 0.004  loss_rpn_loc: 0.026  time: 0.4526  data_time: 0.0054  lr: 0.000250  max_mem: 7096M
[09/26 20:33:48 d2.utils.events]:  eta: 0:23:57  iter: 6819  total_loss: 0.425  loss_cls: 0.114  loss_box_reg: 0.306  loss_rpn_cls: 0.006  loss_rpn_loc: 0.024  time: 0.4526  data_time: 0.0070  lr: 0.000250  max_mem: 7096M
[09/26 20:33:57 d2.utils.events]:  eta: 0:23:48  iter: 6839  total_loss: 0.457  loss_cls: 0.113  loss_box_reg: 0.296  loss_rpn_cls: 0.004  loss_rpn_loc: 0.018  time: 0.4526  data_time: 0.0154  lr: 0.000250  max_mem: 7096M
[09/26 20:34:07 d2.utils.events]:  eta: 0:23:39  iter: 6859  total_loss: 0.533  loss_cls: 0.135  loss_box_reg: 0.321  loss_rpn_cls: 0.005  loss_rpn_loc: 0.025  time: 0.4529  data_time: 0.0067  lr: 0.000250  max_mem: 7096M
[09/26 20:34:16 d2.utils.events]:  eta: 0:23:30  iter: 6879  total_loss: 0.542  loss_cls: 0.166  loss_box_reg: 0.341  loss_rpn_cls: 0.005  loss_rpn_loc: 0.025  time: 0.4530  data_time: 0.0071  lr: 0.000250  max_mem: 7096M
[09/26 20:34:25 d2.utils.events]:  eta: 0:23:21  iter: 6899  total_loss: 0.413  loss_cls: 0.106  loss_box_reg: 0.276  loss_rpn_cls: 0.002  loss_rpn_loc: 0.018  time: 0.4530  data_time: 0.0054  lr: 0.000250  max_mem: 7096M
[09/26 20:34:34 d2.utils.events]:  eta: 0:23:12  iter: 6919  total_loss: 0.583  loss_cls: 0.181  loss_box_reg: 0.357  loss_rpn_cls: 0.003  loss_rpn_loc: 0.027  time: 0.4529  data_time: 0.0194  lr: 0.000250  max_mem: 7096M
[09/26 20:34:43 d2.utils.events]:  eta: 0:23:03  iter: 6939  total_loss: 0.466  loss_cls: 0.124  loss_box_reg: 0.299  loss_rpn_cls: 0.004  loss_rpn_loc: 0.019  time: 0.4528  data_time: 0.0066  lr: 0.000250  max_mem: 7096M
[09/26 20:34:52 d2.utils.events]:  eta: 0:22:54  iter: 6959  total_loss: 0.506  loss_cls: 0.137  loss_box_reg: 0.342  loss_rpn_cls: 0.004  loss_rpn_loc: 0.021  time: 0.4527  data_time: 0.0184  lr: 0.000250  max_mem: 7096M
[09/26 20:35:00 d2.utils.events]:  eta: 0:22:44  iter: 6979  total_loss: 0.452  loss_cls: 0.139  loss_box_reg: 0.289  loss_rpn_cls: 0.002  loss_rpn_loc: 0.014  time: 0.4525  data_time: 0.0062  lr: 0.000250  max_mem: 7096M
[09/26 20:35:10 d2.utils.events]:  eta: 0:22:35  iter: 6999  total_loss: 0.420  loss_cls: 0.122  loss_box_reg: 0.266  loss_rpn_cls: 0.003  loss_rpn_loc: 0.024  time: 0.4526  data_time: 0.0062  lr: 0.000250  max_mem: 7096M
[09/26 20:35:19 d2.utils.events]:  eta: 0:22:26  iter: 7019  total_loss: 0.506  loss_cls: 0.138  loss_box_reg: 0.340  loss_rpn_cls: 0.005  loss_rpn_loc: 0.025  time: 0.4527  data_time: 0.0243  lr: 0.000250  max_mem: 7096M
[09/26 20:35:28 d2.utils.events]:  eta: 0:22:17  iter: 7039  total_loss: 0.472  loss_cls: 0.131  loss_box_reg: 0.334  loss_rpn_cls: 0.002  loss_rpn_loc: 0.017  time: 0.4527  data_time: 0.0086  lr: 0.000250  max_mem: 7096M
[09/26 20:35:37 d2.utils.events]:  eta: 0:22:07  iter: 7059  total_loss: 0.489  loss_cls: 0.161  loss_box_reg: 0.331  loss_rpn_cls: 0.004  loss_rpn_loc: 0.021  time: 0.4525  data_time: 0.0059  lr: 0.000250  max_mem: 7096M
[09/26 20:35:46 d2.utils.events]:  eta: 0:21:58  iter: 7079  total_loss: 0.482  loss_cls: 0.129  loss_box_reg: 0.322  loss_rpn_cls: 0.003  loss_rpn_loc: 0.022  time: 0.4525  data_time: 0.0166  lr: 0.000250  max_mem: 7096M
[09/26 20:35:55 d2.utils.events]:  eta: 0:21:50  iter: 7099  total_loss: 0.427  loss_cls: 0.120  loss_box_reg: 0.269  loss_rpn_cls: 0.002  loss_rpn_loc: 0.024  time: 0.4526  data_time: 0.0062  lr: 0.000250  max_mem: 7096M
[09/26 20:36:04 d2.utils.events]:  eta: 0:21:41  iter: 7119  total_loss: 0.456  loss_cls: 0.125  loss_box_reg: 0.304  loss_rpn_cls: 0.003  loss_rpn_loc: 0.022  time: 0.4528  data_time: 0.0078  lr: 0.000250  max_mem: 7096M
[09/26 20:36:13 d2.utils.events]:  eta: 0:21:32  iter: 7139  total_loss: 0.453  loss_cls: 0.116  loss_box_reg: 0.303  loss_rpn_cls: 0.007  loss_rpn_loc: 0.019  time: 0.4527  data_time: 0.0409  lr: 0.000250  max_mem: 7096M
[09/26 20:36:23 d2.utils.events]:  eta: 0:21:23  iter: 7159  total_loss: 0.562  loss_cls: 0.134  loss_box_reg: 0.364  loss_rpn_cls: 0.002  loss_rpn_loc: 0.019  time: 0.4528  data_time: 0.0265  lr: 0.000250  max_mem: 7096M
[09/26 20:36:32 d2.utils.events]:  eta: 0:21:14  iter: 7179  total_loss: 0.552  loss_cls: 0.123  loss_box_reg: 0.336  loss_rpn_cls: 0.005  loss_rpn_loc: 0.030  time: 0.4530  data_time: 0.0183  lr: 0.000250  max_mem: 7096M
[09/26 20:36:41 d2.utils.events]:  eta: 0:21:05  iter: 7199  total_loss: 0.488  loss_cls: 0.153  loss_box_reg: 0.322  loss_rpn_cls: 0.009  loss_rpn_loc: 0.021  time: 0.4530  data_time: 0.0052  lr: 0.000250  max_mem: 7096M
[09/26 20:36:50 d2.utils.events]:  eta: 0:20:57  iter: 7219  total_loss: 0.495  loss_cls: 0.134  loss_box_reg: 0.332  loss_rpn_cls: 0.006  loss_rpn_loc: 0.021  time: 0.4530  data_time: 0.0159  lr: 0.000250  max_mem: 7096M
[09/26 20:36:59 d2.utils.events]:  eta: 0:20:47  iter: 7239  total_loss: 0.606  loss_cls: 0.155  loss_box_reg: 0.322  loss_rpn_cls: 0.005  loss_rpn_loc: 0.034  time: 0.4530  data_time: 0.0057  lr: 0.000250  max_mem: 7096M
[09/26 20:37:09 d2.utils.events]:  eta: 0:20:38  iter: 7259  total_loss: 0.446  loss_cls: 0.118  loss_box_reg: 0.309  loss_rpn_cls: 0.003  loss_rpn_loc: 0.023  time: 0.4531  data_time: 0.0286  lr: 0.000250  max_mem: 7096M
[09/26 20:37:18 d2.utils.events]:  eta: 0:20:29  iter: 7279  total_loss: 0.554  loss_cls: 0.156  loss_box_reg: 0.344  loss_rpn_cls: 0.004  loss_rpn_loc: 0.033  time: 0.4531  data_time: 0.0063  lr: 0.000250  max_mem: 7096M
[09/26 20:37:27 d2.utils.events]:  eta: 0:20:19  iter: 7299  total_loss: 0.604  loss_cls: 0.205  loss_box_reg: 0.384  loss_rpn_cls: 0.004  loss_rpn_loc: 0.022  time: 0.4531  data_time: 0.0058  lr: 0.000250  max_mem: 7096M
[09/26 20:37:36 d2.utils.events]:  eta: 0:20:11  iter: 7319  total_loss: 0.480  loss_cls: 0.088  loss_box_reg: 0.326  loss_rpn_cls: 0.002  loss_rpn_loc: 0.019  time: 0.4533  data_time: 0.0057  lr: 0.000250  max_mem: 7096M
[09/26 20:37:45 d2.utils.events]:  eta: 0:20:01  iter: 7339  total_loss: 0.592  loss_cls: 0.178  loss_box_reg: 0.346  loss_rpn_cls: 0.004  loss_rpn_loc: 0.028  time: 0.4532  data_time: 0.0071  lr: 0.000250  max_mem: 7096M
[09/26 20:37:54 d2.utils.events]:  eta: 0:19:52  iter: 7359  total_loss: 0.484  loss_cls: 0.133  loss_box_reg: 0.331  loss_rpn_cls: 0.002  loss_rpn_loc: 0.026  time: 0.4532  data_time: 0.0291  lr: 0.000250  max_mem: 7096M
[09/26 20:38:04 d2.utils.events]:  eta: 0:19:44  iter: 7379  total_loss: 0.460  loss_cls: 0.095  loss_box_reg: 0.295  loss_rpn_cls: 0.004  loss_rpn_loc: 0.024  time: 0.4533  data_time: 0.0072  lr: 0.000250  max_mem: 7096M
[09/26 20:38:13 d2.utils.events]:  eta: 0:19:36  iter: 7399  total_loss: 0.529  loss_cls: 0.133  loss_box_reg: 0.335  loss_rpn_cls: 0.005  loss_rpn_loc: 0.027  time: 0.4535  data_time: 0.0072  lr: 0.000250  max_mem: 7096M
[09/26 20:38:22 d2.utils.events]:  eta: 0:19:27  iter: 7419  total_loss: 0.403  loss_cls: 0.104  loss_box_reg: 0.265  loss_rpn_cls: 0.004  loss_rpn_loc: 0.028  time: 0.4536  data_time: 0.0079  lr: 0.000250  max_mem: 7096M
[09/26 20:38:32 d2.utils.events]:  eta: 0:19:19  iter: 7439  total_loss: 0.527  loss_cls: 0.148  loss_box_reg: 0.326  loss_rpn_cls: 0.004  loss_rpn_loc: 0.030  time: 0.4537  data_time: 0.0060  lr: 0.000250  max_mem: 7096M
[09/26 20:38:41 d2.utils.events]:  eta: 0:19:09  iter: 7459  total_loss: 0.602  loss_cls: 0.176  loss_box_reg: 0.369  loss_rpn_cls: 0.008  loss_rpn_loc: 0.040  time: 0.4537  data_time: 0.0071  lr: 0.000250  max_mem: 7096M
[09/26 20:38:50 d2.utils.events]:  eta: 0:19:00  iter: 7479  total_loss: 0.534  loss_cls: 0.141  loss_box_reg: 0.326  loss_rpn_cls: 0.006  loss_rpn_loc: 0.024  time: 0.4536  data_time: 0.0066  lr: 0.000250  max_mem: 7096M
[09/26 20:38:59 d2.utils.events]:  eta: 0:18:51  iter: 7499  total_loss: 0.456  loss_cls: 0.104  loss_box_reg: 0.319  loss_rpn_cls: 0.003  loss_rpn_loc: 0.017  time: 0.4537  data_time: 0.0188  lr: 0.000250  max_mem: 7096M
[09/26 20:39:08 d2.utils.events]:  eta: 0:18:42  iter: 7519  total_loss: 0.531  loss_cls: 0.165  loss_box_reg: 0.343  loss_rpn_cls: 0.004  loss_rpn_loc: 0.020  time: 0.4536  data_time: 0.0068  lr: 0.000250  max_mem: 7096M
[09/26 20:39:18 d2.utils.events]:  eta: 0:18:33  iter: 7539  total_loss: 0.534  loss_cls: 0.149  loss_box_reg: 0.347  loss_rpn_cls: 0.005  loss_rpn_loc: 0.024  time: 0.4539  data_time: 0.0284  lr: 0.000250  max_mem: 7096M
[09/26 20:39:27 d2.utils.events]:  eta: 0:18:24  iter: 7559  total_loss: 0.572  loss_cls: 0.155  loss_box_reg: 0.371  loss_rpn_cls: 0.006  loss_rpn_loc: 0.034  time: 0.4539  data_time: 0.0190  lr: 0.000250  max_mem: 7096M
[09/26 20:39:36 d2.utils.events]:  eta: 0:18:18  iter: 7579  total_loss: 0.559  loss_cls: 0.177  loss_box_reg: 0.371  loss_rpn_cls: 0.002  loss_rpn_loc: 0.024  time: 0.4540  data_time: 0.0070  lr: 0.000250  max_mem: 7096M
[09/26 20:39:46 d2.utils.events]:  eta: 0:18:11  iter: 7599  total_loss: 0.515  loss_cls: 0.183  loss_box_reg: 0.315  loss_rpn_cls: 0.005  loss_rpn_loc: 0.029  time: 0.4542  data_time: 0.0060  lr: 0.000250  max_mem: 7096M
[09/26 20:39:55 d2.utils.events]:  eta: 0:18:01  iter: 7619  total_loss: 0.587  loss_cls: 0.148  loss_box_reg: 0.350  loss_rpn_cls: 0.004  loss_rpn_loc: 0.018  time: 0.4544  data_time: 0.0454  lr: 0.000250  max_mem: 7096M
[09/26 20:40:04 d2.utils.events]:  eta: 0:17:53  iter: 7639  total_loss: 0.487  loss_cls: 0.153  loss_box_reg: 0.319  loss_rpn_cls: 0.002  loss_rpn_loc: 0.020  time: 0.4543  data_time: 0.0071  lr: 0.000250  max_mem: 7096M
[09/26 20:40:13 d2.utils.events]:  eta: 0:17:43  iter: 7659  total_loss: 0.438  loss_cls: 0.128  loss_box_reg: 0.275  loss_rpn_cls: 0.003  loss_rpn_loc: 0.030  time: 0.4543  data_time: 0.0055  lr: 0.000250  max_mem: 7096M
[09/26 20:40:23 d2.utils.events]:  eta: 0:17:38  iter: 7679  total_loss: 0.459  loss_cls: 0.106  loss_box_reg: 0.308  loss_rpn_cls: 0.003  loss_rpn_loc: 0.031  time: 0.4544  data_time: 0.0058  lr: 0.000250  max_mem: 7096M
[09/26 20:40:31 d2.utils.events]:  eta: 0:17:26  iter: 7699  total_loss: 0.429  loss_cls: 0.095  loss_box_reg: 0.298  loss_rpn_cls: 0.003  loss_rpn_loc: 0.019  time: 0.4541  data_time: 0.0210  lr: 0.000250  max_mem: 7096M
[09/26 20:40:40 d2.utils.events]:  eta: 0:17:15  iter: 7719  total_loss: 0.509  loss_cls: 0.136  loss_box_reg: 0.322  loss_rpn_cls: 0.003  loss_rpn_loc: 0.015  time: 0.4539  data_time: 0.0067  lr: 0.000250  max_mem: 7096M
[09/26 20:40:48 d2.utils.events]:  eta: 0:17:06  iter: 7739  total_loss: 0.448  loss_cls: 0.126  loss_box_reg: 0.318  loss_rpn_cls: 0.004  loss_rpn_loc: 0.023  time: 0.4538  data_time: 0.0065  lr: 0.000250  max_mem: 7096M
[09/26 20:40:57 d2.utils.events]:  eta: 0:16:56  iter: 7759  total_loss: 0.529  loss_cls: 0.166  loss_box_reg: 0.310  loss_rpn_cls: 0.004  loss_rpn_loc: 0.021  time: 0.4537  data_time: 0.0241  lr: 0.000250  max_mem: 7096M
[09/26 20:41:07 d2.utils.events]:  eta: 0:16:49  iter: 7779  total_loss: 0.448  loss_cls: 0.116  loss_box_reg: 0.312  loss_rpn_cls: 0.002  loss_rpn_loc: 0.015  time: 0.4539  data_time: 0.0075  lr: 0.000250  max_mem: 7096M
[09/26 20:41:16 d2.utils.events]:  eta: 0:16:40  iter: 7799  total_loss: 0.480  loss_cls: 0.145  loss_box_reg: 0.275  loss_rpn_cls: 0.002  loss_rpn_loc: 0.022  time: 0.4538  data_time: 0.0058  lr: 0.000250  max_mem: 7096M
[09/26 20:41:25 d2.utils.events]:  eta: 0:16:30  iter: 7819  total_loss: 0.463  loss_cls: 0.126  loss_box_reg: 0.306  loss_rpn_cls: 0.002  loss_rpn_loc: 0.015  time: 0.4537  data_time: 0.0061  lr: 0.000250  max_mem: 7096M
[09/26 20:41:34 d2.utils.events]:  eta: 0:16:21  iter: 7839  total_loss: 0.507  loss_cls: 0.149  loss_box_reg: 0.310  loss_rpn_cls: 0.006  loss_rpn_loc: 0.030  time: 0.4538  data_time: 0.0364  lr: 0.000250  max_mem: 7096M
[09/26 20:41:43 d2.utils.events]:  eta: 0:16:12  iter: 7859  total_loss: 0.397  loss_cls: 0.103  loss_box_reg: 0.290  loss_rpn_cls: 0.002  loss_rpn_loc: 0.018  time: 0.4539  data_time: 0.0268  lr: 0.000250  max_mem: 7096M
[09/26 20:41:52 d2.utils.events]:  eta: 0:16:03  iter: 7879  total_loss: 0.440  loss_cls: 0.102  loss_box_reg: 0.334  loss_rpn_cls: 0.002  loss_rpn_loc: 0.017  time: 0.4540  data_time: 0.0286  lr: 0.000250  max_mem: 7096M
[09/26 20:42:01 d2.utils.events]:  eta: 0:15:54  iter: 7899  total_loss: 0.562  loss_cls: 0.118  loss_box_reg: 0.352  loss_rpn_cls: 0.003  loss_rpn_loc: 0.023  time: 0.4539  data_time: 0.0066  lr: 0.000250  max_mem: 7096M
[09/26 20:42:10 d2.utils.events]:  eta: 0:15:45  iter: 7919  total_loss: 0.462  loss_cls: 0.127  loss_box_reg: 0.316  loss_rpn_cls: 0.003  loss_rpn_loc: 0.026  time: 0.4539  data_time: 0.0076  lr: 0.000250  max_mem: 7096M
[09/26 20:42:19 d2.utils.events]:  eta: 0:15:36  iter: 7939  total_loss: 0.489  loss_cls: 0.166  loss_box_reg: 0.316  loss_rpn_cls: 0.003  loss_rpn_loc: 0.021  time: 0.4538  data_time: 0.0060  lr: 0.000250  max_mem: 7096M
[09/26 20:42:28 d2.utils.events]:  eta: 0:15:26  iter: 7959  total_loss: 0.547  loss_cls: 0.139  loss_box_reg: 0.336  loss_rpn_cls: 0.002  loss_rpn_loc: 0.021  time: 0.4537  data_time: 0.0069  lr: 0.000250  max_mem: 7096M
[09/26 20:42:37 d2.utils.events]:  eta: 0:15:18  iter: 7979  total_loss: 0.499  loss_cls: 0.144  loss_box_reg: 0.320  loss_rpn_cls: 0.004  loss_rpn_loc: 0.025  time: 0.4537  data_time: 0.0106  lr: 0.000250  max_mem: 7096M
[09/26 20:42:46 d2.utils.events]:  eta: 0:15:08  iter: 7999  total_loss: 0.493  loss_cls: 0.126  loss_box_reg: 0.323  loss_rpn_cls: 0.003  loss_rpn_loc: 0.023  time: 0.4537  data_time: 0.0070  lr: 0.000250  max_mem: 7096M
[09/26 20:42:55 d2.utils.events]:  eta: 0:14:58  iter: 8019  total_loss: 0.473  loss_cls: 0.120  loss_box_reg: 0.316  loss_rpn_cls: 0.002  loss_rpn_loc: 0.026  time: 0.4537  data_time: 0.0062  lr: 0.000250  max_mem: 7096M
[09/26 20:43:05 d2.utils.events]:  eta: 0:14:49  iter: 8039  total_loss: 0.511  loss_cls: 0.112  loss_box_reg: 0.343  loss_rpn_cls: 0.003  loss_rpn_loc: 0.020  time: 0.4538  data_time: 0.0064  lr: 0.000250  max_mem: 7096M
[09/26 20:43:13 d2.utils.events]:  eta: 0:14:40  iter: 8059  total_loss: 0.523  loss_cls: 0.144  loss_box_reg: 0.343  loss_rpn_cls: 0.003  loss_rpn_loc: 0.027  time: 0.4537  data_time: 0.0065  lr: 0.000250  max_mem: 7096M
[09/26 20:43:22 d2.utils.events]:  eta: 0:14:30  iter: 8079  total_loss: 0.495  loss_cls: 0.144  loss_box_reg: 0.331  loss_rpn_cls: 0.003  loss_rpn_loc: 0.020  time: 0.4536  data_time: 0.0054  lr: 0.000250  max_mem: 7096M
[09/26 20:43:31 d2.utils.events]:  eta: 0:14:20  iter: 8099  total_loss: 0.499  loss_cls: 0.142  loss_box_reg: 0.336  loss_rpn_cls: 0.003  loss_rpn_loc: 0.021  time: 0.4536  data_time: 0.0057  lr: 0.000250  max_mem: 7096M
[09/26 20:43:41 d2.utils.events]:  eta: 0:14:11  iter: 8119  total_loss: 0.597  loss_cls: 0.172  loss_box_reg: 0.370  loss_rpn_cls: 0.008  loss_rpn_loc: 0.031  time: 0.4537  data_time: 0.0285  lr: 0.000250  max_mem: 7096M
[09/26 20:43:50 d2.utils.events]:  eta: 0:14:03  iter: 8139  total_loss: 0.579  loss_cls: 0.172  loss_box_reg: 0.347  loss_rpn_cls: 0.004  loss_rpn_loc: 0.017  time: 0.4538  data_time: 0.0422  lr: 0.000250  max_mem: 7096M
[09/26 20:43:59 d2.utils.events]:  eta: 0:13:53  iter: 8159  total_loss: 0.499  loss_cls: 0.167  loss_box_reg: 0.318  loss_rpn_cls: 0.002  loss_rpn_loc: 0.028  time: 0.4538  data_time: 0.0208  lr: 0.000250  max_mem: 7096M
[09/26 20:44:09 d2.utils.events]:  eta: 0:13:45  iter: 8179  total_loss: 0.482  loss_cls: 0.137  loss_box_reg: 0.317  loss_rpn_cls: 0.005  loss_rpn_loc: 0.030  time: 0.4540  data_time: 0.0302  lr: 0.000250  max_mem: 7096M
[09/26 20:44:18 d2.utils.events]:  eta: 0:13:35  iter: 8199  total_loss: 0.487  loss_cls: 0.139  loss_box_reg: 0.311  loss_rpn_cls: 0.004  loss_rpn_loc: 0.022  time: 0.4539  data_time: 0.0066  lr: 0.000250  max_mem: 7096M
[09/26 20:44:27 d2.utils.events]:  eta: 0:13:27  iter: 8219  total_loss: 0.480  loss_cls: 0.125  loss_box_reg: 0.315  loss_rpn_cls: 0.005  loss_rpn_loc: 0.023  time: 0.4539  data_time: 0.0068  lr: 0.000250  max_mem: 7096M
[09/26 20:44:36 d2.utils.events]:  eta: 0:13:20  iter: 8239  total_loss: 0.456  loss_cls: 0.128  loss_box_reg: 0.299  loss_rpn_cls: 0.005  loss_rpn_loc: 0.029  time: 0.4540  data_time: 0.0067  lr: 0.000250  max_mem: 7096M
[09/26 20:44:45 d2.utils.events]:  eta: 0:13:10  iter: 8259  total_loss: 0.371  loss_cls: 0.098  loss_box_reg: 0.262  loss_rpn_cls: 0.004  loss_rpn_loc: 0.013  time: 0.4539  data_time: 0.0191  lr: 0.000250  max_mem: 7096M
[09/26 20:44:54 d2.utils.events]:  eta: 0:13:01  iter: 8279  total_loss: 0.443  loss_cls: 0.118  loss_box_reg: 0.305  loss_rpn_cls: 0.003  loss_rpn_loc: 0.023  time: 0.4539  data_time: 0.0077  lr: 0.000250  max_mem: 7096M
[09/26 20:45:03 d2.utils.events]:  eta: 0:12:52  iter: 8299  total_loss: 0.524  loss_cls: 0.165  loss_box_reg: 0.307  loss_rpn_cls: 0.008  loss_rpn_loc: 0.020  time: 0.4539  data_time: 0.0166  lr: 0.000250  max_mem: 7096M
[09/26 20:45:12 d2.utils.events]:  eta: 0:12:41  iter: 8319  total_loss: 0.456  loss_cls: 0.109  loss_box_reg: 0.306  loss_rpn_cls: 0.002  loss_rpn_loc: 0.016  time: 0.4539  data_time: 0.0078  lr: 0.000250  max_mem: 7096M
[09/26 20:45:21 d2.utils.events]:  eta: 0:12:34  iter: 8339  total_loss: 0.445  loss_cls: 0.129  loss_box_reg: 0.302  loss_rpn_cls: 0.003  loss_rpn_loc: 0.016  time: 0.4539  data_time: 0.0341  lr: 0.000250  max_mem: 7096M
[09/26 20:45:30 d2.utils.events]:  eta: 0:12:23  iter: 8359  total_loss: 0.488  loss_cls: 0.131  loss_box_reg: 0.316  loss_rpn_cls: 0.002  loss_rpn_loc: 0.018  time: 0.4538  data_time: 0.0171  lr: 0.000250  max_mem: 7096M
[09/26 20:45:39 d2.utils.events]:  eta: 0:12:14  iter: 8379  total_loss: 0.388  loss_cls: 0.103  loss_box_reg: 0.291  loss_rpn_cls: 0.003  loss_rpn_loc: 0.017  time: 0.4538  data_time: 0.0050  lr: 0.000250  max_mem: 7096M
[09/26 20:45:49 d2.utils.events]:  eta: 0:12:04  iter: 8399  total_loss: 0.435  loss_cls: 0.140  loss_box_reg: 0.285  loss_rpn_cls: 0.005  loss_rpn_loc: 0.033  time: 0.4539  data_time: 0.0346  lr: 0.000250  max_mem: 7096M
[09/26 20:45:58 d2.utils.events]:  eta: 0:11:56  iter: 8419  total_loss: 0.502  loss_cls: 0.134  loss_box_reg: 0.342  loss_rpn_cls: 0.004  loss_rpn_loc: 0.023  time: 0.4540  data_time: 0.0063  lr: 0.000250  max_mem: 7096M
[09/26 20:46:08 d2.utils.events]:  eta: 0:11:47  iter: 8439  total_loss: 0.535  loss_cls: 0.157  loss_box_reg: 0.320  loss_rpn_cls: 0.003  loss_rpn_loc: 0.035  time: 0.4541  data_time: 0.0064  lr: 0.000250  max_mem: 7096M
[09/26 20:46:17 d2.utils.events]:  eta: 0:11:38  iter: 8459  total_loss: 0.430  loss_cls: 0.117  loss_box_reg: 0.279  loss_rpn_cls: 0.004  loss_rpn_loc: 0.025  time: 0.4541  data_time: 0.0069  lr: 0.000250  max_mem: 7096M
[09/26 20:46:26 d2.utils.events]:  eta: 0:11:30  iter: 8479  total_loss: 0.476  loss_cls: 0.127  loss_box_reg: 0.286  loss_rpn_cls: 0.003  loss_rpn_loc: 0.026  time: 0.4541  data_time: 0.0064  lr: 0.000250  max_mem: 7096M
[09/26 20:46:34 d2.utils.events]:  eta: 0:11:19  iter: 8499  total_loss: 0.423  loss_cls: 0.108  loss_box_reg: 0.288  loss_rpn_cls: 0.002  loss_rpn_loc: 0.019  time: 0.4540  data_time: 0.0078  lr: 0.000250  max_mem: 7096M
[09/26 20:46:43 d2.utils.events]:  eta: 0:11:10  iter: 8519  total_loss: 0.501  loss_cls: 0.112  loss_box_reg: 0.302  loss_rpn_cls: 0.003  loss_rpn_loc: 0.037  time: 0.4539  data_time: 0.0080  lr: 0.000250  max_mem: 7096M
[09/26 20:46:53 d2.utils.events]:  eta: 0:11:03  iter: 8539  total_loss: 0.544  loss_cls: 0.192  loss_box_reg: 0.342  loss_rpn_cls: 0.005  loss_rpn_loc: 0.022  time: 0.4541  data_time: 0.0078  lr: 0.000250  max_mem: 7096M
[09/26 20:47:02 d2.utils.events]:  eta: 0:10:52  iter: 8559  total_loss: 0.467  loss_cls: 0.132  loss_box_reg: 0.300  loss_rpn_cls: 0.004  loss_rpn_loc: 0.029  time: 0.4540  data_time: 0.0080  lr: 0.000250  max_mem: 7096M
[09/26 20:47:11 d2.utils.events]:  eta: 0:10:42  iter: 8579  total_loss: 0.473  loss_cls: 0.126  loss_box_reg: 0.322  loss_rpn_cls: 0.004  loss_rpn_loc: 0.024  time: 0.4540  data_time: 0.0070  lr: 0.000250  max_mem: 7096M
[09/26 20:47:20 d2.utils.events]:  eta: 0:10:32  iter: 8599  total_loss: 0.490  loss_cls: 0.128  loss_box_reg: 0.331  loss_rpn_cls: 0.004  loss_rpn_loc: 0.018  time: 0.4540  data_time: 0.0374  lr: 0.000250  max_mem: 7096M
[09/26 20:47:29 d2.utils.events]:  eta: 0:10:23  iter: 8619  total_loss: 0.561  loss_cls: 0.151  loss_box_reg: 0.344  loss_rpn_cls: 0.002  loss_rpn_loc: 0.021  time: 0.4540  data_time: 0.0077  lr: 0.000250  max_mem: 7096M
[09/26 20:47:38 d2.utils.events]:  eta: 0:10:14  iter: 8639  total_loss: 0.541  loss_cls: 0.185  loss_box_reg: 0.295  loss_rpn_cls: 0.003  loss_rpn_loc: 0.024  time: 0.4540  data_time: 0.0068  lr: 0.000250  max_mem: 7096M
[09/26 20:47:47 d2.utils.events]:  eta: 0:10:05  iter: 8659  total_loss: 0.468  loss_cls: 0.089  loss_box_reg: 0.330  loss_rpn_cls: 0.004  loss_rpn_loc: 0.021  time: 0.4540  data_time: 0.0066  lr: 0.000250  max_mem: 7096M
[09/26 20:47:57 d2.utils.events]:  eta: 0:09:54  iter: 8679  total_loss: 0.479  loss_cls: 0.113  loss_box_reg: 0.340  loss_rpn_cls: 0.002  loss_rpn_loc: 0.016  time: 0.4541  data_time: 0.0069  lr: 0.000250  max_mem: 7096M
[09/26 20:48:06 d2.utils.events]:  eta: 0:09:47  iter: 8699  total_loss: 0.590  loss_cls: 0.185  loss_box_reg: 0.349  loss_rpn_cls: 0.005  loss_rpn_loc: 0.026  time: 0.4541  data_time: 0.0072  lr: 0.000250  max_mem: 7096M
[09/26 20:48:15 d2.utils.events]:  eta: 0:09:40  iter: 8719  total_loss: 0.568  loss_cls: 0.147  loss_box_reg: 0.358  loss_rpn_cls: 0.004  loss_rpn_loc: 0.019  time: 0.4542  data_time: 0.0065  lr: 0.000250  max_mem: 7096M
[09/26 20:48:24 d2.utils.events]:  eta: 0:09:31  iter: 8739  total_loss: 0.436  loss_cls: 0.144  loss_box_reg: 0.279  loss_rpn_cls: 0.004  loss_rpn_loc: 0.031  time: 0.4542  data_time: 0.0070  lr: 0.000250  max_mem: 7096M
[09/26 20:48:33 d2.utils.events]:  eta: 0:09:23  iter: 8759  total_loss: 0.449  loss_cls: 0.121  loss_box_reg: 0.290  loss_rpn_cls: 0.005  loss_rpn_loc: 0.022  time: 0.4542  data_time: 0.0301  lr: 0.000250  max_mem: 7096M
[09/26 20:48:43 d2.utils.events]:  eta: 0:09:11  iter: 8779  total_loss: 0.363  loss_cls: 0.091  loss_box_reg: 0.224  loss_rpn_cls: 0.003  loss_rpn_loc: 0.022  time: 0.4543  data_time: 0.0481  lr: 0.000250  max_mem: 7096M
[09/26 20:48:52 d2.utils.events]:  eta: 0:09:02  iter: 8799  total_loss: 0.494  loss_cls: 0.153  loss_box_reg: 0.322  loss_rpn_cls: 0.002  loss_rpn_loc: 0.022  time: 0.4543  data_time: 0.0073  lr: 0.000250  max_mem: 7096M
[09/26 20:49:02 d2.utils.events]:  eta: 0:08:53  iter: 8819  total_loss: 0.544  loss_cls: 0.143  loss_box_reg: 0.332  loss_rpn_cls: 0.005  loss_rpn_loc: 0.025  time: 0.4545  data_time: 0.0430  lr: 0.000250  max_mem: 7096M
[09/26 20:49:11 d2.utils.events]:  eta: 0:08:45  iter: 8839  total_loss: 0.476  loss_cls: 0.127  loss_box_reg: 0.305  loss_rpn_cls: 0.004  loss_rpn_loc: 0.021  time: 0.4545  data_time: 0.0071  lr: 0.000250  max_mem: 7096M
[09/26 20:49:20 d2.utils.events]:  eta: 0:08:35  iter: 8859  total_loss: 0.501  loss_cls: 0.090  loss_box_reg: 0.335  loss_rpn_cls: 0.002  loss_rpn_loc: 0.020  time: 0.4544  data_time: 0.0085  lr: 0.000250  max_mem: 7096M
[09/26 20:49:29 d2.utils.events]:  eta: 0:08:26  iter: 8879  total_loss: 0.520  loss_cls: 0.134  loss_box_reg: 0.345  loss_rpn_cls: 0.002  loss_rpn_loc: 0.022  time: 0.4544  data_time: 0.0069  lr: 0.000250  max_mem: 7096M
[09/26 20:49:38 d2.utils.events]:  eta: 0:08:17  iter: 8899  total_loss: 0.525  loss_cls: 0.151  loss_box_reg: 0.340  loss_rpn_cls: 0.004  loss_rpn_loc: 0.024  time: 0.4545  data_time: 0.0057  lr: 0.000250  max_mem: 7096M
[09/26 20:49:48 d2.utils.events]:  eta: 0:08:11  iter: 8919  total_loss: 0.494  loss_cls: 0.133  loss_box_reg: 0.344  loss_rpn_cls: 0.004  loss_rpn_loc: 0.029  time: 0.4547  data_time: 0.0079  lr: 0.000250  max_mem: 7096M
[09/26 20:49:58 d2.utils.events]:  eta: 0:08:02  iter: 8939  total_loss: 0.459  loss_cls: 0.108  loss_box_reg: 0.295  loss_rpn_cls: 0.005  loss_rpn_loc: 0.026  time: 0.4548  data_time: 0.0241  lr: 0.000250  max_mem: 7096M
[09/26 20:50:07 d2.utils.events]:  eta: 0:07:53  iter: 8959  total_loss: 0.497  loss_cls: 0.142  loss_box_reg: 0.310  loss_rpn_cls: 0.005  loss_rpn_loc: 0.027  time: 0.4548  data_time: 0.0086  lr: 0.000250  max_mem: 7096M
[09/26 20:50:16 d2.utils.events]:  eta: 0:07:44  iter: 8979  total_loss: 0.416  loss_cls: 0.134  loss_box_reg: 0.259  loss_rpn_cls: 0.004  loss_rpn_loc: 0.023  time: 0.4548  data_time: 0.0061  lr: 0.000250  max_mem: 7096M
[09/26 20:50:25 d2.utils.events]:  eta: 0:07:36  iter: 8999  total_loss: 0.546  loss_cls: 0.198  loss_box_reg: 0.320  loss_rpn_cls: 0.004  loss_rpn_loc: 0.027  time: 0.4549  data_time: 0.0054  lr: 0.000250  max_mem: 7096M
[09/26 20:50:35 d2.utils.events]:  eta: 0:07:27  iter: 9019  total_loss: 0.496  loss_cls: 0.126  loss_box_reg: 0.294  loss_rpn_cls: 0.002  loss_rpn_loc: 0.029  time: 0.4549  data_time: 0.0104  lr: 0.000250  max_mem: 7096M
[09/26 20:50:44 d2.utils.events]:  eta: 0:07:18  iter: 9039  total_loss: 0.492  loss_cls: 0.141  loss_box_reg: 0.326  loss_rpn_cls: 0.003  loss_rpn_loc: 0.023  time: 0.4549  data_time: 0.0100  lr: 0.000250  max_mem: 7096M
[09/26 20:50:53 d2.utils.events]:  eta: 0:07:09  iter: 9059  total_loss: 0.497  loss_cls: 0.153  loss_box_reg: 0.297  loss_rpn_cls: 0.003  loss_rpn_loc: 0.028  time: 0.4549  data_time: 0.0084  lr: 0.000250  max_mem: 7096M
[09/26 20:51:02 d2.utils.events]:  eta: 0:07:00  iter: 9079  total_loss: 0.400  loss_cls: 0.094  loss_box_reg: 0.282  loss_rpn_cls: 0.003  loss_rpn_loc: 0.019  time: 0.4550  data_time: 0.0560  lr: 0.000250  max_mem: 7096M
[09/26 20:51:11 d2.utils.events]:  eta: 0:06:50  iter: 9099  total_loss: 0.460  loss_cls: 0.123  loss_box_reg: 0.339  loss_rpn_cls: 0.003  loss_rpn_loc: 0.025  time: 0.4550  data_time: 0.0059  lr: 0.000250  max_mem: 7096M
[09/26 20:51:20 d2.utils.events]:  eta: 0:06:41  iter: 9119  total_loss: 0.438  loss_cls: 0.128  loss_box_reg: 0.279  loss_rpn_cls: 0.003  loss_rpn_loc: 0.025  time: 0.4550  data_time: 0.0094  lr: 0.000250  max_mem: 7096M
[09/26 20:51:29 d2.utils.events]:  eta: 0:06:32  iter: 9139  total_loss: 0.473  loss_cls: 0.098  loss_box_reg: 0.280  loss_rpn_cls: 0.003  loss_rpn_loc: 0.027  time: 0.4550  data_time: 0.0088  lr: 0.000250  max_mem: 7096M
[09/26 20:51:39 d2.utils.events]:  eta: 0:06:23  iter: 9159  total_loss: 0.430  loss_cls: 0.116  loss_box_reg: 0.309  loss_rpn_cls: 0.003  loss_rpn_loc: 0.022  time: 0.4551  data_time: 0.0176  lr: 0.000250  max_mem: 7096M
[09/26 20:51:48 d2.utils.events]:  eta: 0:06:14  iter: 9179  total_loss: 0.530  loss_cls: 0.158  loss_box_reg: 0.308  loss_rpn_cls: 0.003  loss_rpn_loc: 0.021  time: 0.4551  data_time: 0.0061  lr: 0.000250  max_mem: 7096M
[09/26 20:51:57 d2.utils.events]:  eta: 0:06:04  iter: 9199  total_loss: 0.420  loss_cls: 0.123  loss_box_reg: 0.272  loss_rpn_cls: 0.003  loss_rpn_loc: 0.023  time: 0.4551  data_time: 0.0533  lr: 0.000250  max_mem: 7096M
[09/26 20:52:06 d2.utils.events]:  eta: 0:05:55  iter: 9219  total_loss: 0.552  loss_cls: 0.157  loss_box_reg: 0.331  loss_rpn_cls: 0.006  loss_rpn_loc: 0.028  time: 0.4550  data_time: 0.0082  lr: 0.000250  max_mem: 7096M
[09/26 20:52:15 d2.utils.events]:  eta: 0:05:45  iter: 9239  total_loss: 0.569  loss_cls: 0.172  loss_box_reg: 0.371  loss_rpn_cls: 0.005  loss_rpn_loc: 0.025  time: 0.4549  data_time: 0.0093  lr: 0.000250  max_mem: 7096M
[09/26 20:52:24 d2.utils.events]:  eta: 0:05:36  iter: 9259  total_loss: 0.541  loss_cls: 0.156  loss_box_reg: 0.314  loss_rpn_cls: 0.008  loss_rpn_loc: 0.033  time: 0.4549  data_time: 0.0064  lr: 0.000250  max_mem: 7096M
[09/26 20:52:33 d2.utils.events]:  eta: 0:05:27  iter: 9279  total_loss: 0.478  loss_cls: 0.110  loss_box_reg: 0.335  loss_rpn_cls: 0.002  loss_rpn_loc: 0.014  time: 0.4548  data_time: 0.0067  lr: 0.000250  max_mem: 7096M
[09/26 20:52:42 d2.utils.events]:  eta: 0:05:18  iter: 9299  total_loss: 0.656  loss_cls: 0.215  loss_box_reg: 0.348  loss_rpn_cls: 0.004  loss_rpn_loc: 0.036  time: 0.4549  data_time: 0.0075  lr: 0.000250  max_mem: 7096M
[09/26 20:52:51 d2.utils.events]:  eta: 0:05:10  iter: 9319  total_loss: 0.528  loss_cls: 0.139  loss_box_reg: 0.364  loss_rpn_cls: 0.003  loss_rpn_loc: 0.029  time: 0.4549  data_time: 0.0297  lr: 0.000250  max_mem: 7096M
[09/26 20:53:01 d2.utils.events]:  eta: 0:05:01  iter: 9339  total_loss: 0.487  loss_cls: 0.112  loss_box_reg: 0.303  loss_rpn_cls: 0.002  loss_rpn_loc: 0.017  time: 0.4550  data_time: 0.0062  lr: 0.000250  max_mem: 7096M
[09/26 20:53:10 d2.utils.events]:  eta: 0:04:52  iter: 9359  total_loss: 0.537  loss_cls: 0.161  loss_box_reg: 0.349  loss_rpn_cls: 0.003  loss_rpn_loc: 0.022  time: 0.4550  data_time: 0.0061  lr: 0.000250  max_mem: 7096M
[09/26 20:53:19 d2.utils.events]:  eta: 0:04:43  iter: 9379  total_loss: 0.472  loss_cls: 0.113  loss_box_reg: 0.312  loss_rpn_cls: 0.003  loss_rpn_loc: 0.020  time: 0.4550  data_time: 0.0240  lr: 0.000250  max_mem: 7096M
[09/26 20:53:27 d2.utils.events]:  eta: 0:04:33  iter: 9399  total_loss: 0.511  loss_cls: 0.153  loss_box_reg: 0.330  loss_rpn_cls: 0.003  loss_rpn_loc: 0.016  time: 0.4549  data_time: 0.0063  lr: 0.000250  max_mem: 7096M
[09/26 20:53:36 d2.utils.events]:  eta: 0:04:24  iter: 9419  total_loss: 0.549  loss_cls: 0.164  loss_box_reg: 0.349  loss_rpn_cls: 0.005  loss_rpn_loc: 0.025  time: 0.4548  data_time: 0.0064  lr: 0.000250  max_mem: 7096M
[09/26 20:53:45 d2.utils.events]:  eta: 0:04:14  iter: 9439  total_loss: 0.448  loss_cls: 0.118  loss_box_reg: 0.291  loss_rpn_cls: 0.002  loss_rpn_loc: 0.020  time: 0.4548  data_time: 0.0139  lr: 0.000250  max_mem: 7096M
[09/26 20:53:55 d2.utils.events]:  eta: 0:04:05  iter: 9459  total_loss: 0.524  loss_cls: 0.154  loss_box_reg: 0.347  loss_rpn_cls: 0.003  loss_rpn_loc: 0.031  time: 0.4548  data_time: 0.0066  lr: 0.000250  max_mem: 7096M
[09/26 20:54:04 d2.utils.events]:  eta: 0:03:56  iter: 9479  total_loss: 0.499  loss_cls: 0.137  loss_box_reg: 0.302  loss_rpn_cls: 0.003  loss_rpn_loc: 0.023  time: 0.4548  data_time: 0.0065  lr: 0.000250  max_mem: 7096M
[09/26 20:54:13 d2.utils.events]:  eta: 0:03:47  iter: 9499  total_loss: 0.423  loss_cls: 0.110  loss_box_reg: 0.278  loss_rpn_cls: 0.002  loss_rpn_loc: 0.026  time: 0.4548  data_time: 0.0099  lr: 0.000250  max_mem: 7096M
[09/26 20:54:22 d2.utils.events]:  eta: 0:03:38  iter: 9519  total_loss: 0.543  loss_cls: 0.173  loss_box_reg: 0.338  loss_rpn_cls: 0.005  loss_rpn_loc: 0.035  time: 0.4548  data_time: 0.0085  lr: 0.000250  max_mem: 7096M
[09/26 20:54:31 d2.utils.events]:  eta: 0:03:29  iter: 9539  total_loss: 0.463  loss_cls: 0.135  loss_box_reg: 0.301  loss_rpn_cls: 0.003  loss_rpn_loc: 0.022  time: 0.4548  data_time: 0.0082  lr: 0.000250  max_mem: 7096M
[09/26 20:54:40 d2.utils.events]:  eta: 0:03:20  iter: 9559  total_loss: 0.470  loss_cls: 0.122  loss_box_reg: 0.317  loss_rpn_cls: 0.004  loss_rpn_loc: 0.015  time: 0.4549  data_time: 0.0065  lr: 0.000250  max_mem: 7096M
[09/26 20:54:49 d2.utils.events]:  eta: 0:03:11  iter: 9579  total_loss: 0.470  loss_cls: 0.132  loss_box_reg: 0.315  loss_rpn_cls: 0.003  loss_rpn_loc: 0.018  time: 0.4549  data_time: 0.0093  lr: 0.000250  max_mem: 7096M
[09/26 20:54:58 d2.utils.events]:  eta: 0:03:02  iter: 9599  total_loss: 0.557  loss_cls: 0.164  loss_box_reg: 0.350  loss_rpn_cls: 0.004  loss_rpn_loc: 0.018  time: 0.4548  data_time: 0.0066  lr: 0.000250  max_mem: 7096M
[09/26 20:55:07 d2.utils.events]:  eta: 0:02:53  iter: 9619  total_loss: 0.482  loss_cls: 0.125  loss_box_reg: 0.322  loss_rpn_cls: 0.004  loss_rpn_loc: 0.014  time: 0.4547  data_time: 0.0067  lr: 0.000250  max_mem: 7096M
[09/26 20:55:16 d2.utils.events]:  eta: 0:02:44  iter: 9639  total_loss: 0.492  loss_cls: 0.136  loss_box_reg: 0.314  loss_rpn_cls: 0.004  loss_rpn_loc: 0.026  time: 0.4546  data_time: 0.0069  lr: 0.000250  max_mem: 7096M
[09/26 20:55:25 d2.utils.events]:  eta: 0:02:35  iter: 9659  total_loss: 0.597  loss_cls: 0.194  loss_box_reg: 0.379  loss_rpn_cls: 0.004  loss_rpn_loc: 0.029  time: 0.4548  data_time: 0.0058  lr: 0.000250  max_mem: 7096M
[09/26 20:55:34 d2.utils.events]:  eta: 0:02:25  iter: 9679  total_loss: 0.437  loss_cls: 0.081  loss_box_reg: 0.296  loss_rpn_cls: 0.002  loss_rpn_loc: 0.023  time: 0.4547  data_time: 0.0064  lr: 0.000250  max_mem: 7096M
[09/26 20:55:44 d2.utils.events]:  eta: 0:02:16  iter: 9699  total_loss: 0.479  loss_cls: 0.127  loss_box_reg: 0.321  loss_rpn_cls: 0.003  loss_rpn_loc: 0.016  time: 0.4548  data_time: 0.0372  lr: 0.000250  max_mem: 7096M
[09/26 20:55:53 d2.utils.events]:  eta: 0:02:07  iter: 9719  total_loss: 0.475  loss_cls: 0.129  loss_box_reg: 0.317  loss_rpn_cls: 0.005  loss_rpn_loc: 0.017  time: 0.4548  data_time: 0.0073  lr: 0.000250  max_mem: 7096M
[09/26 20:56:02 d2.utils.events]:  eta: 0:01:58  iter: 9739  total_loss: 0.487  loss_cls: 0.125  loss_box_reg: 0.276  loss_rpn_cls: 0.001  loss_rpn_loc: 0.028  time: 0.4547  data_time: 0.0070  lr: 0.000250  max_mem: 7096M
[09/26 20:56:11 d2.utils.events]:  eta: 0:01:49  iter: 9759  total_loss: 0.511  loss_cls: 0.144  loss_box_reg: 0.321  loss_rpn_cls: 0.004  loss_rpn_loc: 0.025  time: 0.4548  data_time: 0.0076  lr: 0.000250  max_mem: 7096M
[09/26 20:56:20 d2.utils.events]:  eta: 0:01:40  iter: 9779  total_loss: 0.466  loss_cls: 0.121  loss_box_reg: 0.314  loss_rpn_cls: 0.003  loss_rpn_loc: 0.030  time: 0.4548  data_time: 0.0071  lr: 0.000250  max_mem: 7096M
[09/26 20:56:29 d2.utils.events]:  eta: 0:01:31  iter: 9799  total_loss: 0.527  loss_cls: 0.130  loss_box_reg: 0.353  loss_rpn_cls: 0.002  loss_rpn_loc: 0.027  time: 0.4548  data_time: 0.0053  lr: 0.000250  max_mem: 7096M
[09/26 20:56:39 d2.utils.events]:  eta: 0:01:22  iter: 9819  total_loss: 0.505  loss_cls: 0.129  loss_box_reg: 0.315  loss_rpn_cls: 0.003  loss_rpn_loc: 0.022  time: 0.4549  data_time: 0.0076  lr: 0.000250  max_mem: 7096M
[09/26 20:56:48 d2.utils.events]:  eta: 0:01:13  iter: 9839  total_loss: 0.525  loss_cls: 0.112  loss_box_reg: 0.346  loss_rpn_cls: 0.003  loss_rpn_loc: 0.018  time: 0.4549  data_time: 0.0151  lr: 0.000250  max_mem: 7096M
[09/26 20:56:58 d2.utils.events]:  eta: 0:01:04  iter: 9859  total_loss: 0.480  loss_cls: 0.134  loss_box_reg: 0.280  loss_rpn_cls: 0.003  loss_rpn_loc: 0.031  time: 0.4550  data_time: 0.0167  lr: 0.000250  max_mem: 7096M
[09/26 20:57:07 d2.utils.events]:  eta: 0:00:55  iter: 9879  total_loss: 0.494  loss_cls: 0.121  loss_box_reg: 0.320  loss_rpn_cls: 0.003  loss_rpn_loc: 0.023  time: 0.4550  data_time: 0.0077  lr: 0.000250  max_mem: 7096M
[09/26 20:57:16 d2.utils.events]:  eta: 0:00:46  iter: 9899  total_loss: 0.549  loss_cls: 0.151  loss_box_reg: 0.389  loss_rpn_cls: 0.003  loss_rpn_loc: 0.024  time: 0.4550  data_time: 0.0051  lr: 0.000250  max_mem: 7096M
[09/26 20:57:25 d2.utils.events]:  eta: 0:00:36  iter: 9919  total_loss: 0.521  loss_cls: 0.150  loss_box_reg: 0.318  loss_rpn_cls: 0.002  loss_rpn_loc: 0.022  time: 0.4549  data_time: 0.0069  lr: 0.000250  max_mem: 7096M
[09/26 20:57:34 d2.utils.events]:  eta: 0:00:27  iter: 9939  total_loss: 0.550  loss_cls: 0.140  loss_box_reg: 0.346  loss_rpn_cls: 0.002  loss_rpn_loc: 0.027  time: 0.4550  data_time: 0.0376  lr: 0.000250  max_mem: 7096M
[09/26 20:57:43 d2.utils.events]:  eta: 0:00:18  iter: 9959  total_loss: 0.463  loss_cls: 0.154  loss_box_reg: 0.284  loss_rpn_cls: 0.005  loss_rpn_loc: 0.017  time: 0.4549  data_time: 0.0162  lr: 0.000250  max_mem: 7096M
[09/26 20:57:52 d2.utils.events]:  eta: 0:00:09  iter: 9979  total_loss: 0.484  loss_cls: 0.131  loss_box_reg: 0.284  loss_rpn_cls: 0.003  loss_rpn_loc: 0.027  time: 0.4549  data_time: 0.0072  lr: 0.000250  max_mem: 7096M
[ INFO ] Saving checkpoint to ./output/model_0009999.pth
[ INFO ] Saving checkpoint to ./output/model_final.pth
[09/26 20:58:03 d2.utils.events]:  eta: 0:00:00  iter: 9999  total_loss: 0.400  loss_cls: 0.115  loss_box_reg: 0.251  loss_rpn_cls: 0.003  loss_rpn_loc: 0.022  time: 0.4548  data_time: 0.0060  lr: 0.000250  max_mem: 7096M
[09/26 20:58:03 d2.engine.hooks]: Overall training speed: 4997 iterations in 0:37:52 (0.4549 s / it)
[09/26 20:58:03 d2.engine.hooks]: Total training time: 0:37:59 (0:00:06 on hooks)
In [ ]:
# Inspect the training curves (losses, LR, timings) that detectron2's
# writers logged to ./output during training.
%load_ext tensorboard
%tensorboard --logdir output
In [ ]:
from detectron2.evaluation import COCOEvaluator, inference_on_dataset
from detectron2.data import build_detection_test_loader
# Run COCO-style AP evaluation of the trained model on the validation split.
# NOTE(review): this cell crashes below with "Object of type 'float32' is not
# JSON serializable" while COCOEvaluator caches the dataset in COCO format —
# the registered dataset dicts appear to carry numpy float32 bbox values,
# which json.dump cannot serialize. The fix belongs in the dataset-loading
# function (cast bbox coordinates to plain Python float before registering
# with DatasetCatalog) — confirm against the registration code above.
evaluator = COCOEvaluator(meta + "_val", cfg, False, output_dir="./output/")
val_loader = build_detection_test_loader(cfg, meta + "_val")
print(inference_on_dataset(trainer.model, val_loader, evaluator))
# another equivalent way to evaluate the model is to use `trainer.test`
[09/26 21:03:23 d2.evaluation.coco_evaluation]: 'covid_2_val' is not registered by `register_coco_instances`. Therefore trying to convert it to COCO format ...
[09/26 21:03:23 d2.data.datasets.coco]: Converting annotations of dataset 'covid_2_val' to COCO format ...)
[09/26 21:03:23 d2.data.datasets.coco]: Converting dataset dicts into COCO format
[09/26 21:03:25 d2.data.datasets.coco]: Conversion finished, #images: 2646, #annotations: 11775
[09/26 21:03:25 d2.data.datasets.coco]: Caching COCO format annotations at './output/covid_2_val_coco_format.json' ...
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-37-dd38c2dde33e> in <module>()
      1 from detectron2.evaluation import COCOEvaluator, inference_on_dataset
      2 from detectron2.data import build_detection_test_loader
----> 3 evaluator = COCOEvaluator(meta + "_val", cfg, False, output_dir="./output/")
      4 val_loader = build_detection_test_loader(cfg, meta + "_val")
      5 print(inference_on_dataset(trainer.model, val_loader, evaluator))

/usr/local/lib/python3.6/dist-packages/detectron2/evaluation/coco_evaluation.py in __init__(self, dataset_name, cfg, distributed, output_dir)
     75             cache_path = os.path.join(output_dir, f"{dataset_name}_coco_format.json")
     76             self._metadata.json_file = cache_path
---> 77             convert_to_coco_json(dataset_name, cache_path)
     78 
     79         json_file = PathManager.get_local_path(self._metadata.json_file)

/usr/local/lib/python3.6/dist-packages/detectron2/data/datasets/coco.py in convert_to_coco_json(dataset_name, output_file, allow_cached)
    433             logger.info(f"Caching COCO format annotations at '{output_file}' ...")
    434             with PathManager.open(output_file, "w") as f:
--> 435                 json.dump(coco_dict, f)
    436 
    437 

/usr/lib/python3.6/json/__init__.py in dump(obj, fp, skipkeys, ensure_ascii, check_circular, allow_nan, cls, indent, separators, default, sort_keys, **kw)
    177     # could accelerate with writelines in some versions of Python, at
    178     # a debuggability cost
--> 179     for chunk in iterable:
    180         fp.write(chunk)
    181 

/usr/lib/python3.6/json/encoder.py in _iterencode(o, _current_indent_level)
    428             yield from _iterencode_list(o, _current_indent_level)
    429         elif isinstance(o, dict):
--> 430             yield from _iterencode_dict(o, _current_indent_level)
    431         else:
    432             if markers is not None:

/usr/lib/python3.6/json/encoder.py in _iterencode_dict(dct, _current_indent_level)
    402                 else:
    403                     chunks = _iterencode(value, _current_indent_level)
--> 404                 yield from chunks
    405         if newline_indent is not None:
    406             _current_indent_level -= 1

/usr/lib/python3.6/json/encoder.py in _iterencode_list(lst, _current_indent_level)
    323                 else:
    324                     chunks = _iterencode(value, _current_indent_level)
--> 325                 yield from chunks
    326         if newline_indent is not None:
    327             _current_indent_level -= 1

/usr/lib/python3.6/json/encoder.py in _iterencode_dict(dct, _current_indent_level)
    402                 else:
    403                     chunks = _iterencode(value, _current_indent_level)
--> 404                 yield from chunks
    405         if newline_indent is not None:
    406             _current_indent_level -= 1

/usr/lib/python3.6/json/encoder.py in _iterencode_list(lst, _current_indent_level)
    323                 else:
    324                     chunks = _iterencode(value, _current_indent_level)
--> 325                 yield from chunks
    326         if newline_indent is not None:
    327             _current_indent_level -= 1

/usr/lib/python3.6/json/encoder.py in _iterencode_list(lst, _current_indent_level)
    323                 else:
    324                     chunks = _iterencode(value, _current_indent_level)
--> 325                 yield from chunks
    326         if newline_indent is not None:
    327             _current_indent_level -= 1

/usr/lib/python3.6/json/encoder.py in _iterencode(o, _current_indent_level)
    435                     raise ValueError("Circular reference detected")
    436                 markers[markerid] = o
--> 437             o = _default(o)
    438             yield from _iterencode(o, _current_indent_level)
    439             if markers is not None:

/usr/lib/python3.6/json/encoder.py in default(self, o)
    178         """
    179         raise TypeError("Object of type '%s' is not JSON serializable" %
--> 180                         o.__class__.__name__)
    181 
    182     def encode(self, o):

TypeError: Object of type 'float32' is not JSON serializable

Inference¶

In [ ]:
# cfg still holds everything set for training; adjust it for inference by
# pointing at the final checkpoint and raising the score threshold so only
# confident detections survive.
weights_path = os.path.join(cfg.OUTPUT_DIR, "model_final.pth")
print(weights_path)
cfg.MODEL.WEIGHTS = weights_path              # the model we just trained
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.7   # custom testing threshold
predictor = DefaultPredictor(cfg)
./output/model_final.pth
[ INFO ] Loading checkpoint from ./output/model_final.pth
In [ ]:
from detectron2.utils.visualizer import ColorMode

def draw_predictions(img, output):
    """Draw predicted boxes with class-id / confidence labels on ``img`` and show it.

    Args:
        img: BGR image (numpy array, as read by cv2.imread); modified in place.
        output: detectron2 prediction dict whose 'instances' field carries
            pred_boxes, scores and pred_classes.
    """
    # Move everything to CPU once instead of one .cpu() transfer per box,
    # and use the public Boxes.tensor attribute rather than reaching into
    # __dict__['tensor'] (private internals).
    instances = output['instances'].to('cpu')
    boxes = instances.pred_boxes.tensor.numpy()
    scores = instances.scores.numpy()
    pred_classes = instances.pred_classes.numpy()
    for box, score, cls in zip(boxes, scores, pred_classes):
        x1, y1, x2, y2 = (int(v) for v in box)
        _ = cv2.rectangle(img, (x1, y1), (x2, y2), (255, 255, 255), 1)
        text = "%s %d %%" % (str(cls), score * 100)
        cv2.putText(img, text, (x1, y1), cv2.FONT_HERSHEY_SIMPLEX, 0.3, (255, 255, 255), 1, cv2.LINE_AA)
    cv2_imshow(img)

# Visualize predictions on three random validation images.
for sample in random.sample(dataset_dicts_all, 3):
    image_path = sample["file_name"]
    frame = cv2.imread(image_path)
    print(image_path)
    # Output format: https://detectron2.readthedocs.io/tutorials/models.html#model-output-format
    outputs = predictor(frame)
    v = Visualizer(
        frame[:, :, ::-1],
        metadata=val_metadata,
        scale=0.5,
        # Greys out unsegmented pixels; only relevant for segmentation models.
        instance_mode=ColorMode.IMAGE_BW,
    )
    print(outputs)
    draw_predictions(frame, outputs)
    # out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
    # cv2_imshow(out.get_image()[:, :, ::-1])
/content/dataset/medical-masks-dataset/medical-masks-dataset/images/1125506397_15801322207071n.jpg
{'instances': Instances(num_instances=15, image_height=635, image_width=899, fields=[pred_boxes: Boxes(tensor([[549.5477,  24.4249, 777.0813, 606.5373],
        [406.5891, 130.3370, 548.8791, 596.0851],
        [736.9492,  22.1274, 896.8217, 515.8023],
        [131.8607,  56.3417, 306.3305, 588.8914],
        [320.4991,  61.5864, 462.7468, 570.7958],
        [ 75.3074,  46.6702, 215.1190, 346.2160],
        [214.6271,  76.3933, 289.0464, 150.2677],
        [629.7906,  31.4709, 709.3639, 115.3872],
        [444.0869, 148.3408, 513.3357, 219.5649],
        [774.9694,  21.7772, 839.8033,  86.9091],
        [ 30.7282, 121.2731,  73.0419, 253.4355],
        [378.6835,  62.8079, 446.1783, 134.4642],
        [138.4060,  38.2904, 198.4529, 104.2608],
        [300.9983,  97.3989, 378.8621, 317.3907],
        [534.6266, 110.6356, 590.0101, 266.4051]], device='cuda:0')), scores: tensor([0.9986, 0.9972, 0.9971, 0.9963, 0.9936, 0.9778, 0.9773, 0.9731, 0.9709,
        0.9707, 0.9702, 0.9627, 0.9383, 0.7639, 0.7353], device='cuda:0'), pred_classes: tensor([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0], device='cuda:0')])}
/content/dataset/medical-masks/medical_masks/train/images/aizootech_train_4886.png
{'instances': Instances(num_instances=2, image_height=681, image_width=1024, fields=[pred_boxes: Boxes(tensor([[386.1239, 147.2070, 604.2949, 656.5264],
        [488.8533, 148.0802, 562.8668, 225.4858]], device='cuda:0')), scores: tensor([0.9981, 0.9374], device='cuda:0'), pred_classes: tensor([0, 2], device='cuda:0')])}
/content/dataset/medical-masks-dataset/medical-masks-dataset/images/d11adb544ff4a18148366dd92e645cb1.jpg
{'instances': Instances(num_instances=14, image_height=720, image_width=1280, fields=[pred_boxes: Boxes(tensor([[9.8818e+02, 2.8694e+01, 1.2063e+03, 7.0874e+02],
        [5.1234e+02, 3.4468e+01, 7.5613e+02, 6.3895e+02],
        [2.9934e+02, 4.5512e+01, 4.8072e+02, 4.4824e+02],
        [1.0927e+02, 2.3782e+01, 2.8302e+02, 4.5391e+02],
        [7.7553e+02, 2.8111e+01, 9.6445e+02, 5.9007e+02],
        [8.4804e+02, 4.0791e+01, 9.2638e+02, 1.1678e+02],
        [1.0673e+03, 4.9502e+01, 1.1517e+03, 1.3813e+02],
        [5.9765e+02, 4.4883e+01, 6.7966e+02, 1.2299e+02],
        [7.3411e+02, 1.1254e+02, 7.9469e+02, 2.2143e+02],
        [1.5265e+02, 3.0616e+01, 2.2283e+02, 9.1085e+01],
        [3.7782e+02, 2.8448e+00, 4.4597e+02, 6.1590e+01],
        [7.4838e-01, 3.4318e+01, 2.7863e+01, 2.1701e+02],
        [5.5287e+02, 4.3921e+01, 6.0099e+02, 1.0391e+02],
        [2.1966e+02, 2.9881e+01, 2.8379e+02, 2.3255e+02]], device='cuda:0')), scores: tensor([0.9991, 0.9989, 0.9977, 0.9971, 0.9914, 0.9814, 0.9690, 0.9623, 0.9515,
        0.9490, 0.9363, 0.8368, 0.8033, 0.7500], device='cuda:0'), pred_classes: tensor([0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0], device='cuda:0')])}
In [ ]:
!cp -Rv output "/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k"
'output' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k'
'output/metrics.json' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/metrics.json'
'output/events.out.tfevents.1601140189.42bfe2eb1383.883.0' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/events.out.tfevents.1601140189.42bfe2eb1383.883.0'
'output/model_final.pth' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/model_final.pth'
'output/last_checkpoint' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/last_checkpoint'
'output/covid_1_val_coco_format.json.lock' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/covid_1_val_coco_format.json.lock'
'output/covid_1_val_coco_format.json' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/covid_1_val_coco_format.json'
'output/events.out.tfevents.1601150072.53d94c9ea253.100.0' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/events.out.tfevents.1601150072.53d94c9ea253.100.0'
'output/events.out.tfevents.1601151412.53d94c9ea253.100.1' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/events.out.tfevents.1601151412.53d94c9ea253.100.1'
'output/events.out.tfevents.1601151511.53d94c9ea253.100.2' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/events.out.tfevents.1601151511.53d94c9ea253.100.2'
'output/events.out.tfevents.1601151533.53d94c9ea253.100.3' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/events.out.tfevents.1601151533.53d94c9ea253.100.3'
'output/events.out.tfevents.1601151572.53d94c9ea253.100.4' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/events.out.tfevents.1601151572.53d94c9ea253.100.4'
'output/model_0004999.pth' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/model_0004999.pth'
'output/events.out.tfevents.1601151603.53d94c9ea253.100.5' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/events.out.tfevents.1601151603.53d94c9ea253.100.5'
'output/model_0009999.pth' -> '/content/drive/My Drive/datasets/facemask/detectron_d5_person+mask_faster_rcnn_R_50_FPN_1x_10k/model_0009999.pth'

Verify the model by saving it to disk and reloading it¶

In [ ]:
!wget http://images.cocodataset.org/val2017/000000439715.jpg -q -O input.jpg
img = cv2.imread("./input.jpg")
outputs = predictor(img)  # format is documented at https://detectron2.readthedocs.io/tutorials/models.html#model-output-format
v = Visualizer(img[:, :, ::-1],
                metadata=val_metadata, 
                scale=0.5, 
                instance_mode=ColorMode.IMAGE_BW   # remove the colors of unsegmented pixels. This option is only available for segmentation models
)
# out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
# cv2_imshow(out.get_image()[:, :, ::-1])
draw_predictions(img, outputs)
In [ ]:
import torch
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.modeling import build_model

# Rebuild the config from scratch and reload the trained weights from disk —
# once through DefaultPredictor, once through the lower-level checkpointer API —
# to verify the saved checkpoint is usable on its own.
cfg = get_cfg()
cfg.merge_from_file(model_zoo.get_config_file(model_yaml))
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = .7
cfg.MODEL.DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
cfg.MODEL.ROI_HEADS.NUM_CLASSES = len(classes)
# cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml")  # Let training initialize from model zoo
cfg.MODEL.WEIGHTS = './output/model_final.pth'

predictor_face = DefaultPredictor(cfg)

# Equivalent manual route: build the bare model, then load the checkpoint.
model = build_model(cfg)
_ = DetectionCheckpointer(model).load(cfg.MODEL.WEIGHTS)
[ INFO ] Loading checkpoint from ./output/model_final.pth
[ INFO ] Loading checkpoint from ./output/model_final.pth
In [ ]:
img = cv2.imread("input.jpg")
# img = cv2.imread("Medical mask/Medical mask/Medical Mask/images/3513.png")
print(img.shape)

height, width = img.shape[:2]
# Build the (C, H, W) tensor input for the commented-out raw-model path below.
# BUG FIX: the original called np.transpose([2, 0, 1]) without passing the
# image, which merely transposes the permutation list itself.
img_ = np.transpose(img, (2, 0, 1))
dev = "cuda:0" if torch.cuda.is_available() else "cpu"
device = torch.device(dev)
img_ = torch.from_numpy(img_)

d = {'image': img_, 'height': height, 'width': width}
# face_model.eval()
with torch.no_grad():
    output = predictor_face(img)
    # output = face_model([d])

# scores = output[0]['instances'].scores.cpu().numpy()
# predictions = output[0]['instances'].pred_classes.cpu().numpy()

scores = output['instances'].scores.cpu().numpy()
predictions = output['instances'].pred_classes.cpu().numpy()
# Use the public Boxes.tensor attribute (not __dict__['tensor']) and copy
# to CPU once instead of once per box.
boxes = output['instances'].pred_boxes.tensor.cpu().numpy()
for i in range(len(scores)):
    x1, y1, x2, y2 = (int(v) for v in boxes[i])

    _ = cv2.rectangle(img, (x1, y1), (x2, y2), (255, 0, 0), 1)
    text = "%s %d %%" % (classes[predictions[i]], int(scores[i] * 100))
    # Filled black background strip so the label stays readable.
    _ = cv2.rectangle(img, (x1, y1 + 3), (x1 + 100, y1 - 8), (0, 0, 0), -1)
    # BUG FIX: cv2.putText requires integer coordinates; the original passed
    # the raw float x1/y1 straight from the box tensor.
    cv2.putText(img, text, (x1, y1), cv2.FONT_HERSHEY_SIMPLEX, 0.3, (255, 255, 255), 1, cv2.LINE_AA)

# OpenCV images are BGR; matplotlib expects RGB.
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)

import matplotlib.pyplot as plt
plt.figure(figsize=(20, 10))
plt.imshow(img)
plt.show()
(480, 640, 3)